1# coding: utf-8
2"""
3Package resource API
4--------------------
5
6A resource is a logical file contained within a package, or a logical
7subdirectory thereof.  The package resource API expects resource names
8to have their path parts separated with ``/``, *not* whatever the local
9path separator is.  Do not use os.path operations to manipulate resource
10names being passed into the API.
11
12The package resource API is designed to work with normal filesystem packages,
13.egg files, and unpacked .egg files.  It can also work in a limited way with
14.zip files and with custom PEP 302 loaders that support the ``get_data()``
15method.
16"""
17
18from __future__ import absolute_import
19
20import sys
21import os
22import io
23import time
24import re
25import types
26import zipfile
27import zipimport
28import warnings
29import stat
30import functools
31import pkgutil
32import operator
33import platform
34import collections
35import plistlib
36import email.parser
37import errno
38import tempfile
39import textwrap
40import itertools
41import inspect
42import ntpath
43import posixpath
44from pkgutil import get_importer
45
46try:
47    import _imp
48except ImportError:
49    # Python 3.2 compatibility
50    import imp as _imp
51
try:
    FileExistsError
except NameError:
    # Python 2 has no FileExistsError; fall back to its base class so
    # ``except FileExistsError`` clauses still work (they just catch more).
    FileExistsError = OSError
56
57from pkg_resources.extern import six
58from pkg_resources.extern.six.moves import map, filter
59
60# capture these to bypass sandboxing
61from os import utime
62try:
63    from os import mkdir, rename, unlink
64    WRITE_SUPPORT = True
65except ImportError:
66    # no write support, probably under GAE
67    WRITE_SUPPORT = False
68
69from os import open as os_open
70from os.path import isdir, split
71
72try:
73    import importlib.machinery as importlib_machinery
74    # access attribute to force import under delayed import mechanisms.
75    importlib_machinery.__name__
76except ImportError:
77    importlib_machinery = None
78
79from pkg_resources.extern import appdirs
80from pkg_resources.extern import packaging
81__import__('pkg_resources.extern.packaging.version')
82__import__('pkg_resources.extern.packaging.specifiers')
83__import__('pkg_resources.extern.packaging.requirements')
84__import__('pkg_resources.extern.packaging.markers')
85
86
# On Python 2, a module-level __metaclass__ makes classes declared without
# an explicit base new-style; it is ignored on Python 3.
__metaclass__ = type
88
89
# Reject only Python 3.0-3.4; Python 2 is still allowed through this gate
# (the package supports 2.7 via ``six``).
if (3, 0) < sys.version_info < (3, 5):
    raise RuntimeError("Python 3.5 or later is required")

if six.PY2:
    # Those builtin exceptions are only defined in Python 3; define them as
    # None so ``except PermissionError`` style code can be guarded at runtime.
    PermissionError = None
    NotADirectoryError = None
97
# declare some globals that will be defined later to
# satisfy the linters.  They are rebound at import time once the master
# working set and resource manager are created further down the module.
require = None
working_set = None
add_activation_listener = None
resources_stream = None
cleanup_resources = None
resource_dir = None
resource_stream = None
set_extraction_path = None
resource_isdir = None
resource_string = None
iter_entry_points = None
resource_listdir = None
resource_filename = None
resource_exists = None
_distribution_finders = None
_namespace_handlers = None
_namespace_packages = None
117
118
class PEP440Warning(RuntimeWarning):
    """
    Warning category raised when a version or specifier does not comply
    with PEP 440.
    """
124
125
def parse_version(v):
    """Parse *v* as a PEP 440 version, falling back to a legacy version."""
    try:
        parsed = packaging.version.Version(v)
    except packaging.version.InvalidVersion:
        # non-conforming versions still get an orderable representation
        parsed = packaging.version.LegacyVersion(v)
    return parsed
131
132
# Map: state variable name -> type tag ('dict', 'object', ...) selecting the
# _sget_*/_sset_* hooks used by the module-level __getstate__/__setstate__.
_state_vars = {}
134
135
def _declare_state(vartype, **kw):
    """Bind module-level state variables and record their type tag."""
    g = globals()
    g.update(kw)
    for name in kw:
        _state_vars[name] = vartype
139
140
def __getstate__():
    """Snapshot all registered module state via the matching _sget_* hook."""
    g = globals()
    return {
        name: g['_sget_' + tag](g[name])
        for name, tag in _state_vars.items()
    }
147
148
def __setstate__(state):
    """Restore registered module state via the matching _sset_* hook."""
    g = globals()
    for name, value in state.items():
        # each hook receives (name, current object, saved state)
        g['_sset_' + _state_vars[name]](name, g[name], value)
    return state
154
155
156def _sget_dict(val):
157    return val.copy()
158
159
160def _sset_dict(key, ob, state):
161    ob.clear()
162    ob.update(state)
163
164
165def _sget_object(val):
166    return val.__getstate__()
167
168
169def _sset_object(key, ob, state):
170    ob.__setstate__(state)
171
172
173_sget_none = _sset_none = lambda *args: None
174
175
def get_supported_platform():
    """Return this platform's maximum compatible version.

    distutils.util.get_platform() normally reports the minimum version
    of macOS that would be required to *use* extensions produced by
    distutils.  But what we want when checking compatibility is to know the
    version of macOS that we are *running*.  To allow usage of packages that
    explicitly require a newer version of macOS, we must also know the
    current version of the OS.

    If this condition occurs for any other platform with a version in its
    platform strings, this function should be extended accordingly.
    """
    plat = get_build_platform()
    if sys.platform != "darwin":
        return plat
    m = macosVersionString.match(plat)
    if m is None:
        return plat
    try:
        # substitute the *running* OS version for the build-time one
        plat = 'macosx-%s-%s' % ('.'.join(_macos_vers()[:2]), m.group(3))
    except ValueError:
        # not macOS after all
        pass
    return plat
198
199
__all__ = [
    # Basic resource access and distribution/entry point discovery
    'require', 'run_script', 'get_provider', 'get_distribution',
    'load_entry_point', 'get_entry_map', 'get_entry_info',
    'iter_entry_points',
    'resource_string', 'resource_stream', 'resource_filename',
    'resource_listdir', 'resource_exists', 'resource_isdir',

    # Environmental control
    'declare_namespace', 'working_set', 'add_activation_listener',
    'find_distributions', 'set_extraction_path', 'cleanup_resources',
    'get_default_cache',

    # Primary implementation classes
    'Environment', 'WorkingSet', 'ResourceManager',
    'Distribution', 'Requirement', 'EntryPoint',

    # Exceptions
    'ResolutionError', 'VersionConflict', 'DistributionNotFound',
    'UnknownExtra', 'ExtractionError',

    # Warnings
    'PEP440Warning',

    # Parsing functions and string utilities
    'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
    'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
    'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker',

    # filesystem utilities
    'ensure_directory', 'normalize_path',

    # Distribution "precedence" constants
    'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',

    # "Provider" interfaces, implementations, and registration/lookup APIs
    'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
    'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
    'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
    'register_finder', 'register_namespace_handler', 'register_loader_type',
    'fixup_namespace_packages', 'get_importer',

    # Deprecation warnings
    'PkgResourcesDeprecationWarning',

    # Deprecated/backward compatibility only
    'run_main', 'AvailableDistributions',
]
248
249
class ResolutionError(Exception):
    """Abstract base for dependency resolution errors"""

    def __repr__(self):
        # e.g. VersionConflict(dist, req) -> "VersionConflict(dist, req)"
        return '{}{!r}'.format(self.__class__.__name__, self.args)
255
256
class VersionConflict(ResolutionError):
    """
    Raised when an already-installed distribution does not satisfy a
    requested requirement.

    Construct with the installed Distribution and the offending
    Requirement, in that order.
    """

    _template = "{self.dist} is installed but {self.req} is required"

    @property
    def dist(self):
        # the installed distribution (first positional arg)
        return self.args[0]

    @property
    def req(self):
        # the requested requirement (second positional arg)
        return self.args[1]

    def report(self):
        """Render a human-readable description of the conflict."""
        return self._template.format(self=self)

    def with_context(self, required_by):
        """
        Return self unchanged when `required_by` is empty; otherwise a
        ContextualVersionConflict carrying the set of requirers.
        """
        if required_by:
            return ContextualVersionConflict(*(self.args + (required_by,)))
        return self
287
288
class ContextualVersionConflict(VersionConflict):
    """
    A VersionConflict enriched with a third argument: the set of
    requirements that pulled in the installed Distribution.
    """

    _template = VersionConflict._template + ' by {self.required_by}'

    @property
    def required_by(self):
        # requirements that demanded the conflicting dist (third arg)
        return self.args[2]
300
301
class DistributionNotFound(ResolutionError):
    """A requested distribution was not found"""

    _template = ("The '{self.req}' distribution was not found "
                 "and is required by {self.requirers_str}")

    @property
    def req(self):
        # the unmet Requirement (first positional arg)
        return self.args[0]

    @property
    def requirers(self):
        # iterable of requirer names, or None (second positional arg)
        return self.args[1]

    @property
    def requirers_str(self):
        requirers = self.requirers
        if requirers:
            return ', '.join(requirers)
        return 'the application'

    def report(self):
        """Render a human-readable description of the failure."""
        return self._template.format(self=self)

    def __str__(self):
        return self.report()
327
328
class UnknownExtra(ResolutionError):
    """Raised when a distribution lacks a requested "extra" feature"""
331
332
# Registry of loader type -> provider factory; populated through
# ``register_loader_type`` and consulted by ``get_provider``.
_provider_factories = {}

# 'major.minor' string for the running interpreter, e.g. '3.7'.
PY_MAJOR = '{}.{}'.format(*sys.version_info)

# Distribution "precedence" codes: when several distributions of the same
# project are available, higher values are preferred.
EGG_DIST = 3
BINARY_DIST = 2
SOURCE_DIST = 1
CHECKOUT_DIST = 0
DEVELOP_DIST = -1
341
342
def register_loader_type(loader_type, provider_factory):
    """Register `provider_factory` to make providers for `loader_type`

    `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
    and `provider_factory` is a function that, passed a *module* object,
    returns an ``IResourceProvider`` for that module.
    """
    # later registrations for the same loader type silently win
    _provider_factories[loader_type] = provider_factory
351
352
def get_provider(moduleOrReq):
    """Return an IResourceProvider for the named module or requirement"""
    if isinstance(moduleOrReq, Requirement):
        # a Requirement resolves to its active (or newly activated) dist
        found = working_set.find(moduleOrReq)
        return found or require(str(moduleOrReq))[0]
    if moduleOrReq not in sys.modules:
        __import__(moduleOrReq)
    module = sys.modules[moduleOrReq]
    loader = getattr(module, '__loader__', None)
    factory = _find_adapter(_provider_factories, loader)
    return factory(module)
364
365
366def _macos_vers(_cache=[]):
367    if not _cache:
368        version = platform.mac_ver()[0]
369        # fallback for MacPorts
370        if version == '':
371            plist = '/System/Library/CoreServices/SystemVersion.plist'
372            if os.path.exists(plist):
373                if hasattr(plistlib, 'readPlist'):
374                    plist_content = plistlib.readPlist(plist)
375                    if 'ProductVersion' in plist_content:
376                        version = plist_content['ProductVersion']
377
378        _cache.append(version.split('.'))
379    return _cache[0]
380
381
382def _macos_arch(machine):
383    return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine)
384
385
def get_build_platform():
    """Return this platform's string for platform-specific distributions

    XXX Currently this is the same as ``distutils.util.get_platform()``, but it
    needs some hacks for Linux and macOS.
    """
    from sysconfig import get_platform

    plat = get_platform()
    if sys.platform != "darwin" or plat.startswith('macosx-'):
        return plat
    try:
        # repair a darwin platform string that lacks the macosx- prefix
        version = _macos_vers()
        machine = os.uname()[4].replace(" ", "_")
        plat = "macosx-%d.%d-%s" % (
            int(version[0]), int(version[1]),
            _macos_arch(machine),
        )
    except ValueError:
        # non-Mac darwin system: keep the sysconfig value
        pass
    return plat
408
409
# Platform-string patterns: modern "macosx-10.9-x86_64" and the legacy
# "darwin-8.0.1-ppc" form used by eggs built before setuptools 0.6.
macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
# XXX backward compat
get_platform = get_build_platform
414
415
def compatible_platforms(provided, required):
    """Can code for the `provided` platform run on the `required` platform?

    Returns true if either platform is ``None``, or the platforms are equal.

    XXX Needs compatibility checks for Linux and other unixy OSes.
    """
    if provided is None or required is None or provided == required:
        # easy case
        return True

    reqMac = macosVersionString.match(required)
    if not reqMac:
        # XXX Linux and other platforms' special cases should go here
        return False

    provMac = macosVersionString.match(provided)
    if not provMac:
        # Backwards compatibility for packages built before setuptools 0.6,
        # which used a darwin-* designation instead of macosx-*: accept the
        # known darwin <-> macOS version pairings.
        provDarwin = darwinVersionString.match(provided)
        if not provDarwin:
            # egg isn't macOS or legacy darwin
            return False
        dversion = int(provDarwin.group(1))
        macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
        if dversion == 7 and macosversion >= "10.3":
            return True
        if dversion == 8 and macosversion >= "10.4":
            return True
        return False

    # Compatible when the major version and machine type match and the
    # required OS minor version is >= the provided one.
    return (
        provMac.group(1) == reqMac.group(1)
        and provMac.group(3) == reqMac.group(3)
        and int(provMac.group(2)) <= int(reqMac.group(2))
    )
460
461
def run_script(dist_spec, script_name):
    """Locate distribution `dist_spec` and run its `script_name` script"""
    # Run the script inside the *caller's* global namespace, first wiping
    # it down to just __name__ so the script executes as if stand-alone.
    ns = sys._getframe(1).f_globals
    name = ns['__name__']
    ns.clear()
    ns['__name__'] = name
    require(dist_spec)[0].run_script(script_name, ns)


# backward compatibility
run_main = run_script
473
474
def get_distribution(dist):
    """Return a current distribution object for a Requirement or string"""
    # Coerce progressively: string -> Requirement -> Distribution.
    if isinstance(dist, six.string_types):
        dist = Requirement.parse(dist)
    if isinstance(dist, Requirement):
        dist = get_provider(dist)
    if isinstance(dist, Distribution):
        return dist
    raise TypeError("Expected string, Requirement, or Distribution", dist)
484
485
def load_entry_point(dist, group, name):
    """Return `name` entry point of `group` for `dist` or raise ImportError"""
    distribution = get_distribution(dist)
    return distribution.load_entry_point(group, name)
489
490
def get_entry_map(dist, group=None):
    """Return the entry point map for `group`, or the full entry map"""
    distribution = get_distribution(dist)
    return distribution.get_entry_map(group)
494
495
def get_entry_info(dist, group, name):
    """Return the EntryPoint object for `group`+`name`, or ``None``"""
    distribution = get_distribution(dist)
    return distribution.get_entry_info(group, name)
499
500
class IMetadataProvider:
    """Interface of objects that supply a distribution's metadata.

    NOTE: methods are declared without ``self``; this class is an interface
    specification, not a usable implementation.
    """

    def has_metadata(name):
        """Does the package's distribution contain the named metadata?"""

    def get_metadata(name):
        """The named metadata resource as a string"""

    def get_metadata_lines(name):
        """Yield named metadata resource as list of non-blank non-comment lines

        Leading and trailing whitespace is stripped from each line, and lines
        with ``#`` as the first non-blank character are omitted."""

    def metadata_isdir(name):
        """Is the named metadata a directory?  (like ``os.path.isdir()``)"""

    def metadata_listdir(name):
        """List of metadata names in the directory (like ``os.listdir()``)"""

    def run_script(script_name, namespace):
        """Execute the named script in the supplied namespace dictionary"""
522
523
class IResourceProvider(IMetadataProvider):
    """An object that provides access to package resources

    NOTE: methods are declared without ``self``; this class is an interface
    specification.  Resource names use ``/`` as the path separator.
    """

    def get_resource_filename(manager, resource_name):
        """Return a true filesystem path for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_stream(manager, resource_name):
        """Return a readable file-like object for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_string(manager, resource_name):
        """Return a string containing the contents of `resource_name`

        `manager` must be an ``IResourceManager``"""

    def has_resource(resource_name):
        """Does the package contain the named resource?"""

    def resource_isdir(resource_name):
        """Is the named resource a directory?  (like ``os.path.isdir()``)"""

    def resource_listdir(resource_name):
        """List of resource names in the directory (like ``os.listdir()``)"""
550
551
class WorkingSet:
    """A collection of active distributions on sys.path (or a similar list)"""

    def __init__(self, entries=None):
        """Create working set from list of path entries (default=sys.path)"""
        # Path entries in order; may contain duplicates, mirroring sys.path.
        self.entries = []
        # Map: path entry -> list of distribution keys found on that entry.
        self.entry_keys = {}
        # Map: distribution key -> the active Distribution for that project.
        self.by_key = {}
        # Callables invoked (via _added_new) each time a dist is added.
        self.callbacks = []

        if entries is None:
            entries = sys.path

        for entry in entries:
            self.add_entry(entry)

    @classmethod
    def _build_master(cls):
        """
        Prepare the master working set.
        """
        ws = cls()
        try:
            from __main__ import __requires__
        except ImportError:
            # The main program does not list any requirements
            return ws

        # ensure the requirements are met
        try:
            ws.require(__requires__)
        except VersionConflict:
            # sys.path as-is can't satisfy __requires__; rebuild from scratch
            return cls._build_from_requirements(__requires__)

        return ws

    @classmethod
    def _build_from_requirements(cls, req_spec):
        """
        Build a working set from a requirement spec. Rewrites sys.path.
        """
        # try it without defaults already on sys.path
        # by starting with an empty path
        ws = cls([])
        reqs = parse_requirements(req_spec)
        dists = ws.resolve(reqs, Environment())
        for dist in dists:
            ws.add(dist)

        # add any missing entries from sys.path
        for entry in sys.path:
            if entry not in ws.entries:
                ws.add_entry(entry)

        # then copy back to sys.path
        sys.path[:] = ws.entries
        return ws

    def add_entry(self, entry):
        """Add a path item to ``.entries``, finding any distributions on it

        ``find_distributions(entry, True)`` is used to find distributions
        corresponding to the path entry, and they are added.  `entry` is
        always appended to ``.entries``, even if it is already present.
        (This is because ``sys.path`` can contain the same value more than
        once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
        equal ``sys.path``.)
        """
        self.entry_keys.setdefault(entry, [])
        self.entries.append(entry)
        for dist in find_distributions(entry, True):
            # insert=False: the entry was appended to .entries just above
            self.add(dist, entry, False)

    def __contains__(self, dist):
        """True if `dist` is the active distribution for its project"""
        return self.by_key.get(dist.key) == dist

    def find(self, req):
        """Find a distribution matching requirement `req`

        If there is an active distribution for the requested project, this
        returns it as long as it meets the version requirement specified by
        `req`.  But, if there is an active distribution for the project and it
        does *not* meet the `req` requirement, ``VersionConflict`` is raised.
        If there is no active distribution for the requested project, ``None``
        is returned.
        """
        dist = self.by_key.get(req.key)
        if dist is not None and dist not in req:
            # XXX add more info
            raise VersionConflict(dist, req)
        return dist

    def iter_entry_points(self, group, name=None):
        """Yield entry point objects from `group` matching `name`

        If `name` is None, yields all entry points in `group` from all
        distributions in the working set, otherwise only ones matching
        both `group` and `name` are yielded (in distribution order).
        """
        return (
            entry
            for dist in self
            for entry in dist.get_entry_map(group).values()
            if name is None or name == entry.name
        )

    def run_script(self, requires, script_name):
        """Locate distribution for `requires` and run `script_name` script"""
        # Clear the caller's globals (keeping only __name__) so the script
        # runs as if it were executed stand-alone.
        ns = sys._getframe(1).f_globals
        name = ns['__name__']
        ns.clear()
        ns['__name__'] = name
        self.require(requires)[0].run_script(script_name, ns)

    def __iter__(self):
        """Yield distributions for non-duplicate projects in the working set

        The yield order is the order in which the items' path entries were
        added to the working set.
        """
        seen = {}
        for item in self.entries:
            if item not in self.entry_keys:
                # workaround a cache issue
                continue

            for key in self.entry_keys[item]:
                if key not in seen:
                    seen[key] = 1
                    yield self.by_key[key]

    def add(self, dist, entry=None, insert=True, replace=False):
        """Add `dist` to working set, associated with `entry`

        If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
        On exit from this routine, `entry` is added to the end of the working
        set's ``.entries`` (if it wasn't already present).

        `dist` is only added to the working set if it's for a project that
        doesn't already have a distribution in the set, unless `replace=True`.
        If it's added, any callbacks registered with the ``subscribe()`` method
        will be called.
        """
        if insert:
            dist.insert_on(self.entries, entry, replace=replace)

        if entry is None:
            entry = dist.location
        # record the dist's key under both the explicit entry and its location
        keys = self.entry_keys.setdefault(entry, [])
        keys2 = self.entry_keys.setdefault(dist.location, [])
        if not replace and dist.key in self.by_key:
            # ignore hidden distros
            return

        self.by_key[dist.key] = dist
        if dist.key not in keys:
            keys.append(dist.key)
        if dist.key not in keys2:
            keys2.append(dist.key)
        self._added_new(dist)

    def resolve(self, requirements, env=None, installer=None,
                replace_conflicting=False, extras=None):
        """List all distributions needed to (recursively) meet `requirements`

        `requirements` must be a sequence of ``Requirement`` objects.  `env`,
        if supplied, should be an ``Environment`` instance.  If
        not supplied, it defaults to all distributions available within any
        entry or distribution in the working set.  `installer`, if supplied,
        will be invoked with each requirement that cannot be met by an
        already-installed distribution; it should return a ``Distribution`` or
        ``None``.

        Unless `replace_conflicting=True`, raises a VersionConflict exception
        if any requirements are found on the path that have the correct name
        but the wrong version.  Otherwise, if an `installer` is supplied it
        will be invoked to obtain the correct version of the requirement and
        activate it.

        `extras` is a list of the extras to be used with these requirements.
        This is important because extra requirements may look like `my_req;
        extra = "my_extra"`, which would otherwise be interpreted as a purely
        optional requirement.  Instead, we want to be able to assert that these
        requirements are truly required.
        """

        # set up the stack
        requirements = list(requirements)[::-1]
        # set of processed requirements
        processed = {}
        # key -> dist
        best = {}
        to_activate = []

        req_extras = _ReqExtras()

        # Mapping of requirement to set of distributions that required it;
        # useful for reporting info about conflicts.
        required_by = collections.defaultdict(set)

        while requirements:
            # process dependencies breadth-first
            req = requirements.pop(0)
            if req in processed:
                # Ignore cyclic or redundant dependencies
                continue

            if not req_extras.markers_pass(req, extras):
                # the requirement only applies under an extra we don't need
                continue

            dist = best.get(req.key)
            if dist is None:
                # Find the best distribution and add it to the map
                dist = self.by_key.get(req.key)
                if dist is None or (dist not in req and replace_conflicting):
                    ws = self
                    if env is None:
                        if dist is None:
                            env = Environment(self.entries)
                        else:
                            # Use an empty environment and workingset to avoid
                            # any further conflicts with the conflicting
                            # distribution
                            env = Environment([])
                            ws = WorkingSet([])
                    dist = best[req.key] = env.best_match(
                        req, ws, installer,
                        replace_conflicting=replace_conflicting
                    )
                    if dist is None:
                        requirers = required_by.get(req, None)
                        raise DistributionNotFound(req, requirers)
                to_activate.append(dist)
            if dist not in req:
                # Oops, the "best" so far conflicts with a dependency
                dependent_req = required_by[req]
                raise VersionConflict(dist, req).with_context(dependent_req)

            # push the new requirements onto the stack
            new_requirements = dist.requires(req.extras)[::-1]
            requirements.extend(new_requirements)

            # Register the new requirements needed by req
            for new_requirement in new_requirements:
                required_by[new_requirement].add(req.project_name)
                req_extras[new_requirement] = req.extras

            processed[req] = True

        # return list of distros to activate
        return to_activate

    def find_plugins(
            self, plugin_env, full_env=None, installer=None, fallback=True):
        """Find all activatable distributions in `plugin_env`

        Example usage::

            distributions, errors = working_set.find_plugins(
                Environment(plugin_dirlist)
            )
            # add plugins+libs to sys.path
            map(working_set.add, distributions)
            # display errors
            print('Could not load', errors)

        The `plugin_env` should be an ``Environment`` instance that contains
        only distributions that are in the project's "plugin directory" or
        directories. The `full_env`, if supplied, should be an ``Environment``
        contains all currently-available distributions.  If `full_env` is not
        supplied, one is created automatically from the ``WorkingSet`` this
        method is called on, which will typically mean that every directory on
        ``sys.path`` will be scanned for distributions.

        `installer` is a standard installer callback as used by the
        ``resolve()`` method. The `fallback` flag indicates whether we should
        attempt to resolve older versions of a plugin if the newest version
        cannot be resolved.

        This method returns a 2-tuple: (`distributions`, `error_info`), where
        `distributions` is a list of the distributions found in `plugin_env`
        that were loadable, along with any other distributions that are needed
        to resolve their dependencies.  `error_info` is a dictionary mapping
        unloadable plugin distributions to an exception instance describing the
        error that occurred. Usually this will be a ``DistributionNotFound`` or
        ``VersionConflict`` instance.
        """

        plugin_projects = list(plugin_env)
        # scan project names in alphabetic order
        plugin_projects.sort()

        error_info = {}
        distributions = {}

        if full_env is None:
            env = Environment(self.entries)
            env += plugin_env
        else:
            env = full_env + plugin_env

        # resolve against a private copy so failures don't pollute self
        shadow_set = self.__class__([])
        # put all our entries in shadow_set
        list(map(shadow_set.add, self))

        for project_name in plugin_projects:

            for dist in plugin_env[project_name]:

                req = [dist.as_requirement()]

                try:
                    resolvees = shadow_set.resolve(req, env, installer)

                except ResolutionError as v:
                    # save error info
                    error_info[dist] = v
                    if fallback:
                        # try the next older version of project
                        continue
                    else:
                        # give up on this project, keep going
                        break

                else:
                    list(map(shadow_set.add, resolvees))
                    distributions.update(dict.fromkeys(resolvees))

                    # success, no need to try any more versions of this project
                    break

        distributions = list(distributions)
        distributions.sort()

        return distributions, error_info

    def require(self, *requirements):
        """Ensure that distributions matching `requirements` are activated

        `requirements` must be a string or a (possibly-nested) sequence
        thereof, specifying the distributions and versions required.  The
        return value is a sequence of the distributions that needed to be
        activated to fulfill the requirements; all relevant distributions are
        included, even if they were already activated in this working set.
        """
        needed = self.resolve(parse_requirements(requirements))

        for dist in needed:
            self.add(dist)

        return needed

    def subscribe(self, callback, existing=True):
        """Invoke `callback` for all distributions

        If `existing=True` (default), call on all existing ones, as well.
        """
        if callback in self.callbacks:
            return
        self.callbacks.append(callback)
        if not existing:
            return
        for dist in self:
            callback(dist)

    def _added_new(self, dist):
        # notify every subscriber of the newly-activated distribution
        for callback in self.callbacks:
            callback(dist)

    def __getstate__(self):
        # shallow-copy all mutable state so the pickle can't alias live data
        return (
            self.entries[:], self.entry_keys.copy(), self.by_key.copy(),
            self.callbacks[:]
        )

    def __setstate__(self, e_k_b_c):
        # mirror __getstate__: copy again so the unpickled set owns its state
        entries, keys, by_key, callbacks = e_k_b_c
        self.entries = entries[:]
        self.entry_keys = keys.copy()
        self.by_key = by_key.copy()
        self.callbacks = callbacks[:]
936
937
class _ReqExtras(dict):
    """
    Map each requirement to the extras that demanded it.
    """

    def markers_pass(self, req, extras=None):
        """
        Evaluate markers for req against each extra that
        demanded it.

        Return False if the req has a marker and fails
        evaluation. Otherwise, return True.
        """
        # Extras recorded for this req, plus any explicitly supplied ones;
        # a lone ``None`` stands in when there are none at all.
        demanded = self.get(req, ()) + (extras or (None,))
        evaluations = (
            req.marker.evaluate({'extra': extra})
            for extra in demanded
        )
        return not req.marker or any(evaluations)
956
957
class Environment:
    """Searchable snapshot of distributions on a search path"""

    def __init__(
            self, search_path=None, platform=get_supported_platform(),
            python=PY_MAJOR):
        """Snapshot distributions available on a search path

        Any distributions found on `search_path` are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items.  If not
        supplied, ``sys.path`` is used.

        `platform` is an optional string specifying the name of the platform
        that platform-specific distributions must be compatible with.  If
        unspecified, it defaults to the current platform.  `python` is an
        optional string naming the desired version of Python (e.g. ``'3.6'``);
        it defaults to the current version.

        You may explicitly set `platform` (and/or `python`) to ``None`` if you
        wish to map *all* distributions, not just those compatible with the
        running platform or Python version.
        """
        # NOTE(review): the `platform`/`python` defaults are evaluated once,
        # at class-definition time.  ``None`` is a meaningful value here
        # ("match everything"), so the usual None-default idiom cannot apply.
        # Maps lowercased project key -> list of distributions, newest first.
        self._distmap = {}
        self.platform = platform
        self.python = python
        self.scan(search_path)

    def can_add(self, dist):
        """Is distribution `dist` acceptable for this environment?

        The distribution must match the platform and python version
        requirements specified when this environment was created, or False
        is returned.
        """
        # A dist that records no Python version is treated as compatible
        # with any Python.
        py_compat = (
            self.python is None
            or dist.py_version is None
            or dist.py_version == self.python
        )
        return py_compat and compatible_platforms(dist.platform, self.platform)

    def remove(self, dist):
        """Remove `dist` from the environment"""
        # Raises KeyError (unknown project) or ValueError (dist not listed)
        # if `dist` was never added.
        self._distmap[dist.key].remove(dist)

    def scan(self, search_path=None):
        """Scan `search_path` for distributions usable in this environment

        Any distributions found are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items.  If not
        supplied, ``sys.path`` is used.  Only distributions conforming to
        the platform/python version defined at initialization are added.
        """
        if search_path is None:
            search_path = sys.path

        for item in search_path:
            for dist in find_distributions(item):
                self.add(dist)

    def __getitem__(self, project_name):
        """Return a newest-to-oldest list of distributions for `project_name`

        Uses case-insensitive `project_name` comparison, assuming all the
        project's distributions use their project's name converted to all
        lowercase as their key.

        """
        distribution_key = project_name.lower()
        return self._distmap.get(distribution_key, [])

    def add(self, dist):
        """Add `dist` if we ``can_add()`` it and it has not already been added
        """
        if self.can_add(dist) and dist.has_version():
            dists = self._distmap.setdefault(dist.key, [])
            if dist not in dists:
                dists.append(dist)
                # Keep each project's list ordered newest-first.
                dists.sort(key=operator.attrgetter('hashcmp'), reverse=True)

    def best_match(
            self, req, working_set, installer=None, replace_conflicting=False):
        """Find distribution best matching `req` and usable on `working_set`

        This calls the ``find(req)`` method of the `working_set` to see if a
        suitable distribution is already active.  (This may raise
        ``VersionConflict`` if an unsuitable version of the project is already
        active in the specified `working_set`.)  If a suitable distribution
        isn't active, this method returns the newest distribution in the
        environment that meets the ``Requirement`` in `req`.  If no suitable
        distribution is found, and `installer` is supplied, then the result of
        calling the environment's ``obtain(req, installer)`` method will be
        returned.
        """
        try:
            dist = working_set.find(req)
        except VersionConflict:
            if not replace_conflicting:
                raise
            # Fall through and look for a replacement in this environment.
            dist = None
        if dist is not None:
            return dist
        # self[req.key] is newest-first, so the first match is the newest.
        for dist in self[req.key]:
            if dist in req:
                return dist
        # try to download/install
        return self.obtain(req, installer)

    def obtain(self, requirement, installer=None):
        """Obtain a distribution matching `requirement` (e.g. via download)

        Obtain a distro that matches requirement (e.g. via download).  In the
        base ``Environment`` class, this routine just returns
        ``installer(requirement)``, unless `installer` is None, in which case
        None is returned instead.  This method is a hook that allows subclasses
        to attempt other ways of obtaining a distribution before falling back
        to the `installer` argument."""
        # Implicitly returns None when no installer is supplied.
        if installer is not None:
            return installer(requirement)

    def __iter__(self):
        """Yield the unique project names of the available distributions"""
        for key in self._distmap.keys():
            # Skip keys whose distribution lists have been emptied by remove().
            if self[key]:
                yield key

    def __iadd__(self, other):
        """In-place addition of a distribution or environment"""
        if isinstance(other, Distribution):
            self.add(other)
        elif isinstance(other, Environment):
            for project in other:
                for dist in other[project]:
                    self.add(dist)
        else:
            raise TypeError("Can't add %r to environment" % (other,))
        return self

    def __add__(self, other):
        """Add an environment or distribution to an environment"""
        # The result is unrestricted (no platform/python filtering) so it can
        # hold everything from both operands.
        new = self.__class__([], platform=None, python=None)
        for env in self, other:
            new += env
        return new
1102
1103
# XXX backward compatibility
# Old public name for `Environment`; kept so legacy callers keep working.
AvailableDistributions = Environment
1106
1107
class ExtractionError(RuntimeError):
    """An error occurred extracting a resource

    Raised by ``ResourceManager.extraction_error()``.

    The following attributes are available from instances of this exception:

    manager
        The resource manager that raised this exception

    cache_path
        The base directory for resource extraction

    original_error
        The exception instance that caused extraction to fail
    """
1122
1123
class ResourceManager:
    """Manage resource extraction and packages"""
    # Base directory for extraction; None means use get_default_cache().
    extraction_path = None

    def __init__(self):
        # Tracks every extraction target path (mapped to 1, i.e. used as a
        # set) so cleanup can know what was generated.
        self.cached_files = {}

    def resource_exists(self, package_or_requirement, resource_name):
        """Does the named resource exist?"""
        return get_provider(package_or_requirement).has_resource(resource_name)

    def resource_isdir(self, package_or_requirement, resource_name):
        """Is the named resource an existing directory?"""
        return get_provider(package_or_requirement).resource_isdir(
            resource_name
        )

    def resource_filename(self, package_or_requirement, resource_name):
        """Return a true filesystem path for specified resource"""
        return get_provider(package_or_requirement).get_resource_filename(
            self, resource_name
        )

    def resource_stream(self, package_or_requirement, resource_name):
        """Return a readable file-like object for specified resource"""
        return get_provider(package_or_requirement).get_resource_stream(
            self, resource_name
        )

    def resource_string(self, package_or_requirement, resource_name):
        """Return specified resource as a string"""
        return get_provider(package_or_requirement).get_resource_string(
            self, resource_name
        )

    def resource_listdir(self, package_or_requirement, resource_name):
        """List the contents of the named resource directory"""
        return get_provider(package_or_requirement).resource_listdir(
            resource_name
        )

    def extraction_error(self):
        """Give an error message for problems extracting file(s)"""

        # Wrap the exception currently being handled in an ExtractionError
        # carrying extra context; always raises.
        old_exc = sys.exc_info()[1]
        cache_path = self.extraction_path or get_default_cache()

        tmpl = textwrap.dedent("""
            Can't extract file(s) to egg cache

            The following error occurred while trying to extract file(s)
            to the Python egg cache:

              {old_exc}

            The Python egg cache directory is currently set to:

              {cache_path}

            Perhaps your account does not have write access to this directory?
            You can change the cache directory by setting the PYTHON_EGG_CACHE
            environment variable to point to an accessible directory.
            """).lstrip()
        err = ExtractionError(tmpl.format(**locals()))
        err.manager = self
        err.cache_path = cache_path
        err.original_error = old_exc
        raise err

    def get_cache_path(self, archive_name, names=()):
        """Return absolute location in cache for `archive_name` and `names`

        The parent directory of the resulting path will be created if it does
        not already exist.  `archive_name` should be the base filename of the
        enclosing egg (which may not be the name of the enclosing zipfile!),
        including its ".egg" extension.  `names`, if provided, should be a
        sequence of path name parts "under" the egg's extraction location.

        This method should only be called by resource providers that need to
        obtain an extraction location, and only for names they intend to
        extract, as it tracks the generated names for possible cleanup later.
        """
        extract_path = self.extraction_path or get_default_cache()
        target_path = os.path.join(extract_path, archive_name + '-tmp', *names)
        try:
            # Uses the sandbox-bypassing mkdir captured at import time.
            _bypass_ensure_directory(target_path)
        except Exception:
            self.extraction_error()

        self._warn_unsafe_extraction_path(extract_path)

        # Record the target for later cleanup_resources().
        self.cached_files[target_path] = 1
        return target_path

    @staticmethod
    def _warn_unsafe_extraction_path(path):
        """
        If the default extraction path is overridden and set to an insecure
        location, such as /tmp, it opens up an opportunity for an attacker to
        replace an extracted file with an unauthorized payload. Warn the user
        if a known insecure location is used.

        See Distribute #375 for more details.
        """
        # NOTE(review): assumes %windir% is set in the environment on
        # Windows; a missing variable would raise KeyError here — confirm.
        if os.name == 'nt' and not path.startswith(os.environ['windir']):
            # On Windows, permissions are generally restrictive by default
            #  and temp directories are not writable by other users, so
            #  bypass the warning.
            return
        mode = os.stat(path).st_mode
        if mode & stat.S_IWOTH or mode & stat.S_IWGRP:
            msg = (
                "Extraction path is writable by group/others "
                "and vulnerable to attack when "
                "used with get_resource_filename ({path}). "
                "Consider a more secure "
                "location (set with .set_extraction_path or the "
                "PYTHON_EGG_CACHE environment variable)."
            ).format(**locals())
            warnings.warn(msg, UserWarning)

    def postprocess(self, tempname, filename):
        """Perform any platform-specific postprocessing of `tempname`

        This is where Mac header rewrites should be done; other platforms don't
        have anything special they should do.

        Resource providers should call this method ONLY after successfully
        extracting a compressed resource.  They must NOT call it on resources
        that are already in the filesystem.

        `tempname` is the current (temporary) name of the file, and `filename`
        is the name it will be renamed to by the caller after this routine
        returns.
        """

        if os.name == 'posix':
            # Make the resource executable
            mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777
            os.chmod(tempname, mode)

    def set_extraction_path(self, path):
        """Set the base path where resources will be extracted to, if needed.

        If you do not call this routine before any extractions take place, the
        path defaults to the return value of ``get_default_cache()``.  (Which
        is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
        platform-specific fallbacks.  See that routine's documentation for more
        details.)

        Resources are extracted to subdirectories of this path based upon
        information given by the ``IResourceProvider``.  You may set this to a
        temporary directory, but then you must call ``cleanup_resources()`` to
        delete the extracted files when done.  There is no guarantee that
        ``cleanup_resources()`` will be able to remove all extracted files.

        (Note: you may not change the extraction path for a given resource
        manager once resources have been extracted, unless you first call
        ``cleanup_resources()``.)
        """
        if self.cached_files:
            raise ValueError(
                "Can't change extraction path, files already extracted"
            )

        self.extraction_path = path

    def cleanup_resources(self, force=False):
        """
        Delete all extracted resource files and directories, returning a list
        of the file and directory names that could not be successfully removed.
        This function does not have any concurrency protection, so it should
        generally only be called when the extraction path is a temporary
        directory exclusive to a single process.  This method is not
        automatically called; you must call it explicitly or register it as an
        ``atexit`` function if you wish to ensure cleanup of a temporary
        directory used for extractions.
        """
        # XXX
        # NOTE(review): unimplemented stub — currently a no-op returning
        # None, despite the docstring promising a list of leftover names.
1303
1304
def get_default_cache():
    """
    Return the ``PYTHON_EGG_CACHE`` environment variable
    or a platform-relevant user cache dir for an app
    named "Python-Eggs".
    """
    cache_dir = os.environ.get('PYTHON_EGG_CACHE')
    if cache_dir:
        return cache_dir
    # Fall back to the per-user, per-platform cache directory.
    return appdirs.user_cache_dir(appname='Python-Eggs')
1315
1316
def safe_name(name):
    """Convert an arbitrary string to a standard distribution name

    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
    """
    illegal_runs = re.compile(r'[^A-Za-z0-9.]+')
    return illegal_runs.sub('-', name)
1323
1324
def safe_version(version):
    """
    Convert an arbitrary string to a standard version string
    """
    try:
        # Prefer the canonical normalized form when the version parses.
        return str(packaging.version.Version(version))
    except packaging.version.InvalidVersion:
        # Lossy fallback: spaces become dots, and each run of other
        # illegal characters becomes a single dash.
        sanitized = version.replace(' ', '.')
        return re.sub('[^A-Za-z0-9.]+', '-', sanitized)
1335
1336
def safe_extra(extra):
    """Convert an arbitrary string to a standard 'extra' name

    Any runs of non-alphanumeric characters are replaced with a single '_',
    and the result is always lowercased.
    """
    normalized = re.sub('[^A-Za-z0-9.-]+', '_', extra)
    return normalized.lower()
1344
1345
def to_filename(name):
    """Convert a project or version name to its filename-escaped form

    Any '-' characters are currently replaced with '_'.
    """
    return '_'.join(name.split('-'))
1352
1353
def invalid_marker(text):
    """
    Validate text as a PEP 508 environment marker; return an exception
    if invalid or False otherwise.
    """
    try:
        evaluate_marker(text)
    except SyntaxError as e:
        # Location attributes are meaningless for a marker string, so
        # strip them before handing the exception back.
        e.filename = None
        e.lineno = None
        return e
    else:
        return False
1366
1367
def evaluate_marker(text, extra=None):
    """
    Evaluate a PEP 508 environment marker.
    Return a boolean indicating the marker result in this environment.
    Raise SyntaxError if marker is invalid.

    This implementation uses the 'pyparsing' module.
    """
    try:
        return packaging.markers.Marker(text).evaluate()
    except packaging.markers.InvalidMarker as e:
        # Surface marker problems as SyntaxError, chaining the cause.
        raise SyntaxError(e) from e
1381
1382
class NullProvider:
    """Try to implement resources and metadata for arbitrary PEP 302 loaders"""

    # Filled in by egg-aware subclasses; None means "not an egg".
    egg_name = None
    egg_info = None
    loader = None

    def __init__(self, module):
        # Loader (if any) that created `module`; used by _get() below.
        self.loader = getattr(module, '__loader__', None)
        self.module_path = os.path.dirname(getattr(module, '__file__', ''))

    def get_resource_filename(self, manager, resource_name):
        return self._fn(self.module_path, resource_name)

    def get_resource_stream(self, manager, resource_name):
        # Wrap the raw bytes so callers get a file-like object.
        return io.BytesIO(self.get_resource_string(manager, resource_name))

    def get_resource_string(self, manager, resource_name):
        return self._get(self._fn(self.module_path, resource_name))

    def has_resource(self, resource_name):
        return self._has(self._fn(self.module_path, resource_name))

    def _get_metadata_path(self, name):
        return self._fn(self.egg_info, name)

    def has_metadata(self, name):
        if not self.egg_info:
            # No egg-info location: return the falsy value itself.
            return self.egg_info

        path = self._get_metadata_path(name)
        return self._has(path)

    def get_metadata(self, name):
        if not self.egg_info:
            return ""
        path = self._get_metadata_path(name)
        value = self._get(path)
        if six.PY2:
            # Python 2 callers historically received raw bytes.
            return value
        try:
            return value.decode('utf-8')
        except UnicodeDecodeError as exc:
            # Include the path in the error message to simplify
            # troubleshooting, and without changing the exception type.
            exc.reason += ' in {} file at path: {}'.format(name, path)
            raise

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))

    def resource_isdir(self, resource_name):
        return self._isdir(self._fn(self.module_path, resource_name))

    def metadata_isdir(self, name):
        return self.egg_info and self._isdir(self._fn(self.egg_info, name))

    def resource_listdir(self, resource_name):
        return self._listdir(self._fn(self.module_path, resource_name))

    def metadata_listdir(self, name):
        if self.egg_info:
            return self._listdir(self._fn(self.egg_info, name))
        return []

    def run_script(self, script_name, namespace):
        # Scripts live under the metadata 'scripts/' subdirectory.
        script = 'scripts/' + script_name
        if not self.has_metadata(script):
            raise ResolutionError(
                "Script {script!r} not found in metadata at {self.egg_info!r}"
                .format(**locals()),
            )
        # Normalize all line endings to '\n' before compiling.
        script_text = self.get_metadata(script).replace('\r\n', '\n')
        script_text = script_text.replace('\r', '\n')
        script_filename = self._fn(self.egg_info, script)
        namespace['__file__'] = script_filename
        if os.path.exists(script_filename):
            # Prefer the real file so tracebacks point at actual source.
            with open(script_filename) as fid:
                source = fid.read()
            code = compile(source, script_filename, 'exec')
            exec(code, namespace, namespace)
        else:
            # No real file (e.g. zipped egg): seed linecache so tracebacks
            # can still display the script's source lines.
            from linecache import cache
            cache[script_filename] = (
                len(script_text), 0, script_text.split('\n'), script_filename
            )
            script_code = compile(script_text, script_filename, 'exec')
            exec(script_code, namespace, namespace)

    def _has(self, path):
        # Abstract hook: subclasses implement existence checks.
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _isdir(self, path):
        # Abstract hook: subclasses implement directory checks.
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _listdir(self, path):
        # Abstract hook: subclasses implement directory listing.
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _fn(self, base, resource_name):
        # Validate first (rejects '..' and absolute paths), then map the
        # '/'-separated resource name onto the local filesystem.
        self._validate_resource_path(resource_name)
        if resource_name:
            return os.path.join(base, *resource_name.split('/'))
        return base

    @staticmethod
    def _validate_resource_path(path):
        """
        Validate the resource paths according to the docs.
        https://setuptools.readthedocs.io/en/latest/pkg_resources.html#basic-resource-access

        >>> warned = getfixture('recwarn')
        >>> warnings.simplefilter('always')
        >>> vrp = NullProvider._validate_resource_path
        >>> vrp('foo/bar.txt')
        >>> bool(warned)
        False
        >>> vrp('../foo/bar.txt')
        >>> bool(warned)
        True
        >>> warned.clear()
        >>> vrp('/foo/bar.txt')
        >>> bool(warned)
        True
        >>> vrp('foo/../../bar.txt')
        >>> bool(warned)
        True
        >>> warned.clear()
        >>> vrp('foo/f../bar.txt')
        >>> bool(warned)
        False

        Windows path separators are straight-up disallowed.
        >>> vrp(r'\\foo/bar.txt')
        Traceback (most recent call last):
        ...
        ValueError: Use of .. or absolute path in a resource path \
is not allowed.

        >>> vrp(r'C:\\foo/bar.txt')
        Traceback (most recent call last):
        ...
        ValueError: Use of .. or absolute path in a resource path \
is not allowed.

        Blank values are allowed

        >>> vrp('')
        >>> bool(warned)
        False

        Non-string values are not.

        >>> vrp(None)
        Traceback (most recent call last):
        ...
        AttributeError: ...
        """
        invalid = (
            os.path.pardir in path.split(posixpath.sep) or
            posixpath.isabs(path) or
            ntpath.isabs(path)
        )
        if not invalid:
            return

        msg = "Use of .. or absolute path in a resource path is not allowed."

        # Aggressively disallow Windows absolute paths
        if ntpath.isabs(path) and not posixpath.isabs(path):
            raise ValueError(msg)

        # for compatibility, warn; in future
        # raise ValueError(msg)
        warnings.warn(
            msg[:-1] + " and will raise exceptions in a future release.",
            DeprecationWarning,
            stacklevel=4,
        )

    def _get(self, path):
        # PEP 302 optional loader API: get_data() returns raw bytes.
        if hasattr(self.loader, 'get_data'):
            return self.loader.get_data(path)
        raise NotImplementedError(
            "Can't perform this operation for loaders without 'get_data()'"
        )
1574
1575
# Fall back to NullProvider for any loader type without a specific provider.
register_loader_type(object, NullProvider)
1577
1578
def _parents(path):
    """
    yield all parents of path including path
    """
    # os.path.split eventually reaches a fixed point (root or ''), at
    # which point iteration stops.
    previous = None
    while path != previous:
        yield path
        previous = path
        path, _ = os.path.split(path)
1588
1589
class EggProvider(NullProvider):
    """Provider based on a virtual filesystem"""

    def __init__(self, module):
        NullProvider.__init__(self, module)
        self._setup_prefix()

    def _setup_prefix(self):
        # Metadata may be nested inside a "basket" of multiple eggs, so
        # walk upward from module_path (rather than trusting .archive)
        # and adopt the first enclosing egg found.
        for candidate in _parents(self.module_path):
            if _is_egg_path(candidate):
                self._set_egg(candidate)
                break

    def _set_egg(self, path):
        # Record the egg's root, basename, and metadata directory.
        self.egg_root = path
        self.egg_name = os.path.basename(path)
        self.egg_info = os.path.join(path, 'EGG-INFO')
1608
1609
class DefaultProvider(EggProvider):
    """Provides access to package resources in the filesystem"""

    def _has(self, path):
        return os.path.exists(path)

    def _isdir(self, path):
        return os.path.isdir(path)

    def _listdir(self, path):
        return os.listdir(path)

    def get_resource_stream(self, manager, resource_name):
        # Stream straight from the filesystem; no extraction needed.
        return open(self._fn(self.module_path, resource_name), 'rb')

    def _get(self, path):
        with open(path, 'rb') as stream:
            return stream.read()

    @classmethod
    def _register(cls):
        # Cover both source and bytecode-only file loaders; a missing
        # loader class degrades to type(None), which matches nothing.
        for loader_name in ('SourceFileLoader', 'SourcelessFileLoader'):
            loader_cls = getattr(importlib_machinery, loader_name, type(None))
            register_loader_type(loader_cls, cls)
1635
1636
# Register DefaultProvider for the standard filesystem module loaders.
DefaultProvider._register()
1638
1639
class EmptyProvider(NullProvider):
    """Provider that returns nothing for all requests"""

    module_path = None

    def __init__(self):
        # Deliberately skip NullProvider.__init__: there is no module.
        pass

    def _has(self, path):
        return False

    def _isdir(self, path):
        return False

    def _listdir(self, path):
        return []

    def _get(self, path):
        return ''
1655
1656
# Shared stateless singleton used wherever "no provider" is needed.
empty_provider = EmptyProvider()
1658
1659
class ZipManifests(dict):
    """
    zip manifest builder
    """

    @classmethod
    def build(cls, path):
        """
        Build a dictionary similar to the zipimport directory
        caches, except instead of tuples, store ZipInfo objects.

        Use a platform-specific path separator (os.sep) for the path keys
        for compatibility with pypy on Windows.
        """
        with zipfile.ZipFile(path) as zfile:
            manifest = {}
            for name in zfile.namelist():
                key = name.replace('/', os.sep)
                manifest[key] = zfile.getinfo(name)
            return manifest

    load = build
1685
1686
class MemoizedZipManifests(ZipManifests):
    """
    Memoized zipfile manifests.
    """
    manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime')

    def load(self, path):
        """
        Load a manifest at path or return a suitable manifest already loaded.
        """
        path = os.path.normpath(path)
        mtime = os.stat(path).st_mtime

        # Rebuild only when unseen or the file changed on disk.
        cached = self.get(path)
        if cached is None or cached.mtime != mtime:
            cached = self.manifest_mod(self.build(path), mtime)
            self[path] = cached

        return cached.manifest
1705
1706
class ZipProvider(EggProvider):
    """Resource support for zips and eggs"""

    # Cache of eagerly-extracted resource names; None until computed
    # (presumably by _get_eager_resources — confirm, defined elsewhere).
    eagers = None
    # Class-level (shared across instances) memoized manifest cache.
    _zip_manifests = MemoizedZipManifests()

    def __init__(self, module):
        EggProvider.__init__(self, module)
        # Archive path plus separator; prefix of every virtual file path.
        self.zip_pre = self.loader.archive + os.sep
1716
1717    def _zipinfo_name(self, fspath):
1718        # Convert a virtual filename (full path to file) into a zipfile subpath
1719        # usable with the zipimport directory cache for our target archive
1720        fspath = fspath.rstrip(os.sep)
1721        if fspath == self.loader.archive:
1722            return ''
1723        if fspath.startswith(self.zip_pre):
1724            return fspath[len(self.zip_pre):]
1725        raise AssertionError(
1726            "%s is not a subpath of %s" % (fspath, self.zip_pre)
1727        )
1728
1729    def _parts(self, zip_path):
1730        # Convert a zipfile subpath into an egg-relative path part list.
1731        # pseudo-fs path
1732        fspath = self.zip_pre + zip_path
1733        if fspath.startswith(self.egg_root + os.sep):
1734            return fspath[len(self.egg_root) + 1:].split(os.sep)
1735        raise AssertionError(
1736            "%s is not a subpath of %s" % (fspath, self.egg_root)
1737        )
1738
    @property
    def zipinfo(self):
        # Manifest of ZipInfo objects for this archive, served from the
        # shared memoized cache.
        return self._zip_manifests.load(self.loader.archive)
1742
1743    def get_resource_filename(self, manager, resource_name):
1744        if not self.egg_name:
1745            raise NotImplementedError(
1746                "resource_filename() only supported for .egg, not .zip"
1747            )
1748        # no need to lock for extraction, since we use temp names
1749        zip_path = self._resource_to_zip(resource_name)
1750        eagers = self._get_eager_resources()
1751        if '/'.join(self._parts(zip_path)) in eagers:
1752            for name in eagers:
1753                self._extract_resource(manager, self._eager_to_zip(name))
1754        return self._extract_resource(manager, zip_path)
1755
1756    @staticmethod
1757    def _get_date_and_size(zip_stat):
1758        size = zip_stat.file_size
1759        # ymdhms+wday, yday, dst
1760        date_time = zip_stat.date_time + (0, 0, -1)
1761        # 1980 offset already done
1762        timestamp = time.mktime(date_time)
1763        return timestamp, size
1764
1765    def _extract_resource(self, manager, zip_path):
1766
1767        if zip_path in self._index():
1768            for name in self._index()[zip_path]:
1769                last = self._extract_resource(
1770                    manager, os.path.join(zip_path, name)
1771                )
1772            # return the extracted directory name
1773            return os.path.dirname(last)
1774
1775        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
1776
1777        if not WRITE_SUPPORT:
1778            raise IOError('"os.rename" and "os.unlink" are not supported '
1779                          'on this platform')
1780        try:
1781
1782            real_path = manager.get_cache_path(
1783                self.egg_name, self._parts(zip_path)
1784            )
1785
1786            if self._is_current(real_path, zip_path):
1787                return real_path
1788
1789            outf, tmpnam = _mkstemp(
1790                ".$extract",
1791                dir=os.path.dirname(real_path),
1792            )
1793            os.write(outf, self.loader.get_data(zip_path))
1794            os.close(outf)
1795            utime(tmpnam, (timestamp, timestamp))
1796            manager.postprocess(tmpnam, real_path)
1797
1798            try:
1799                rename(tmpnam, real_path)
1800
1801            except os.error:
1802                if os.path.isfile(real_path):
1803                    if self._is_current(real_path, zip_path):
1804                        # the file became current since it was checked above,
1805                        #  so proceed.
1806                        return real_path
1807                    # Windows, del old file and retry
1808                    elif os.name == 'nt':
1809                        unlink(real_path)
1810                        rename(tmpnam, real_path)
1811                        return real_path
1812                raise
1813
1814        except os.error:
1815            # report a user-friendly error
1816            manager.extraction_error()
1817
1818        return real_path
1819
1820    def _is_current(self, file_path, zip_path):
1821        """
1822        Return True if the file_path is current for this zip_path
1823        """
1824        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
1825        if not os.path.isfile(file_path):
1826            return False
1827        stat = os.stat(file_path)
1828        if stat.st_size != size or stat.st_mtime != timestamp:
1829            return False
1830        # check that the contents match
1831        zip_contents = self.loader.get_data(zip_path)
1832        with open(file_path, 'rb') as f:
1833            file_contents = f.read()
1834        return zip_contents == file_contents
1835
1836    def _get_eager_resources(self):
1837        if self.eagers is None:
1838            eagers = []
1839            for name in ('native_libs.txt', 'eager_resources.txt'):
1840                if self.has_metadata(name):
1841                    eagers.extend(self.get_metadata_lines(name))
1842            self.eagers = eagers
1843        return self.eagers
1844
1845    def _index(self):
1846        try:
1847            return self._dirindex
1848        except AttributeError:
1849            ind = {}
1850            for path in self.zipinfo:
1851                parts = path.split(os.sep)
1852                while parts:
1853                    parent = os.sep.join(parts[:-1])
1854                    if parent in ind:
1855                        ind[parent].append(parts[-1])
1856                        break
1857                    else:
1858                        ind[parent] = [parts.pop()]
1859            self._dirindex = ind
1860            return ind
1861
1862    def _has(self, fspath):
1863        zip_path = self._zipinfo_name(fspath)
1864        return zip_path in self.zipinfo or zip_path in self._index()
1865
1866    def _isdir(self, fspath):
1867        return self._zipinfo_name(fspath) in self._index()
1868
1869    def _listdir(self, fspath):
1870        return list(self._index().get(self._zipinfo_name(fspath), ()))
1871
1872    def _eager_to_zip(self, resource_name):
1873        return self._zipinfo_name(self._fn(self.egg_root, resource_name))
1874
1875    def _resource_to_zip(self, resource_name):
1876        return self._zipinfo_name(self._fn(self.module_path, resource_name))
1877
1878
1879register_loader_type(zipimport.zipimporter, ZipProvider)
1880
1881
class FileMetadata(EmptyProvider):
    """Metadata handler for standalone PKG-INFO files

    Usage::

        metadata = FileMetadata("/path/to/PKG-INFO")

    This provider rejects all data and metadata requests except for PKG-INFO,
    which is treated as existing, and will be the contents of the file at
    the provided location.
    """

    def __init__(self, path):
        # Location of the standalone PKG-INFO file.
        self.path = path

    def _get_metadata_path(self, name):
        # Every metadata name maps onto the single backing file.
        return self.path

    def has_metadata(self, name):
        # Only PKG-INFO is advertised, and only when the file exists.
        return name == 'PKG-INFO' and os.path.isfile(self.path)

    def get_metadata(self, name):
        if name != 'PKG-INFO':
            raise KeyError("No metadata except PKG-INFO is available")

        with io.open(self.path, encoding='utf-8', errors="replace") as f:
            content = f.read()
        self._warn_on_replacement(content)
        return content

    def _warn_on_replacement(self, metadata):
        # Python 2.7 compat for: replacement_char = '�'
        replacement_char = b'\xef\xbf\xbd'.decode('utf-8')
        if replacement_char in metadata:
            tmpl = "{self.path} could not be properly decoded in UTF-8"
            msg = tmpl.format(self=self)
            warnings.warn(msg)

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))
1922
1923
class PathMetadata(DefaultProvider):
    """Metadata provider for egg directories

    Usage::

        # Development eggs:

        egg_info = "/path/to/PackageName.egg-info"
        base_dir = os.path.dirname(egg_info)
        metadata = PathMetadata(base_dir, egg_info)
        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
        dist = Distribution(base_dir, project_name=dist_name, metadata=metadata)

        # Unpacked egg directories:

        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
        metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
        dist = Distribution.from_filename(egg_path, metadata=metadata)
    """

    def __init__(self, path, egg_info):
        # Directory containing the importable code (the sys.path entry).
        self.module_path = path
        # Directory holding the .egg-info / EGG-INFO metadata.
        self.egg_info = egg_info
1947
1948
class EggMetadata(ZipProvider):
    """Metadata provider for .egg files"""

    def __init__(self, importer):
        """Create a metadata provider from a zipimporter"""

        self.zip_pre = importer.archive + os.sep
        self.loader = importer
        # A non-empty prefix means the importer targets a subdirectory
        # inside the archive rather than its root.
        self.module_path = (
            os.path.join(importer.archive, importer.prefix)
            if importer.prefix
            else importer.archive
        )
        self._setup_prefix()
1962
1963
1964_declare_state('dict', _distribution_finders={})
1965
1966
def register_finder(importer_type, distribution_finder):
    """Register `distribution_finder` to find distributions in sys.path items

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `distribution_finder` is a callable that, passed a path
    item and the importer instance, yields ``Distribution`` instances found on
    that path item.  See ``pkg_resources.find_on_path`` for an example."""
    # Lookups resolve via _find_adapter, which honors subclass relationships.
    _distribution_finders[importer_type] = distribution_finder
1975
1976
def find_distributions(path_item, only=False):
    """Yield distributions accessible via `path_item`"""
    importer = get_importer(path_item)
    # Dispatch to the finder registered for this importer's type.
    locate = _find_adapter(_distribution_finders, importer)
    return locate(importer, path_item, only)
1982
1983
def find_eggs_in_zip(importer, path_item, only=False):
    """
    Find eggs in zip files; possibly multiple nested eggs.
    """
    if importer.archive.endswith('.whl'):
        # wheels are not supported with this finder
        # they don't have PKG-INFO metadata, and won't ever contain eggs
        return
    metadata = EggMetadata(importer)
    if metadata.has_metadata('PKG-INFO'):
        # The archive itself is an egg: yield it as a distribution.
        yield Distribution.from_filename(path_item, metadata=metadata)
    if only:
        # don't yield nested distros
        return
    for subitem in metadata.resource_listdir(''):
        if _is_egg_path(subitem):
            # Recurse into eggs bundled inside this archive.
            subpath = os.path.join(path_item, subitem)
            dists = find_eggs_in_zip(zipimport.zipimporter(subpath), subpath)
            for dist in dists:
                yield dist
        elif subitem.lower().endswith('.dist-info'):
            subpath = os.path.join(path_item, subitem)
            submeta = EggMetadata(zipimport.zipimporter(subpath))
            # Point the provider's metadata at the .dist-info directory.
            submeta.egg_info = subpath
            yield Distribution.from_location(path_item, subitem, submeta)
2009
2010
2011register_finder(zipimport.zipimporter, find_eggs_in_zip)
2012
2013
def find_nothing(importer, path_item, only=False):
    """Finder for importer types that can never contain distributions."""
    return ()
2016
2017
2018register_finder(object, find_nothing)
2019
2020
def _by_version_descending(names):
    """
    Given a list of filenames, return them in descending order
    by version number.

    >>> names = 'bar', 'foo', 'Python-2.7.10.egg', 'Python-2.7.2.egg'
    >>> _by_version_descending(names)
    ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'foo', 'bar']
    >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.egg'
    >>> _by_version_descending(names)
    ['Setuptools-1.2.3.egg', 'Setuptools-1.2.3b1.egg']
    >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.post1.egg'
    >>> _by_version_descending(names)
    ['Setuptools-1.2.3.post1.egg', 'Setuptools-1.2.3b1.egg']
    """
    def _version_key(name):
        """Parse each dash-separated component (plus the extension)."""
        stem, ext = os.path.splitext(name)
        components = itertools.chain(stem.split('-'), [ext])
        return [packaging.version.parse(part) for part in components]

    return sorted(names, key=_version_key, reverse=True)
2045
2046
def find_on_path(importer, path_item, only=False):
    """Yield distributions accessible on a sys.path directory"""
    path_item = _normalize_cached(path_item)

    if _is_unpacked_egg(path_item):
        # The path item itself is an unpacked egg; yield it and stop.
        yield Distribution.from_filename(
            path_item, metadata=PathMetadata(
                path_item, os.path.join(path_item, 'EGG-INFO')
            )
        )
        return

    # Full paths of the directory's children.
    entries = (
        os.path.join(path_item, child)
        for child in safe_listdir(path_item)
    )

    # for performance, before sorting by version,
    # screen entries for only those that will yield
    # distributions
    filtered = (
        entry
        for entry in entries
        if dist_factory(path_item, entry, only)
    )

    # scan for .egg and .egg-info in directory
    path_item_entries = _by_version_descending(filtered)
    for entry in path_item_entries:
        # NOTE(review): `entry` is already an absolute path here, so this
        # join returns it unchanged -- confirm before refactoring.
        fullpath = os.path.join(path_item, entry)
        factory = dist_factory(path_item, entry, only)
        for dist in factory(fullpath):
            yield dist
2080
2081
def dist_factory(path_item, entry, only):
    """Return a dist_factory for the given entry."""
    lower = entry.lower()
    is_egg_info = lower.endswith('.egg-info')
    is_dist_info = (
        lower.endswith('.dist-info') and
        os.path.isdir(os.path.join(path_item, entry))
    )
    # Metadata directories/files are handled directly.
    if is_egg_info or is_dist_info:
        return distributions_from_metadata
    # Eggs recurse through the generic finder machinery.
    if not only and _is_egg_path(entry):
        return find_distributions
    # .egg-link files indirect to a development checkout.
    if not only and lower.endswith('.egg-link'):
        return resolve_egg_link
    # Falsy sentinel: produces no distributions.
    return NoDists()
2100
2101
class NoDists:
    """
    >>> bool(NoDists())
    False

    >>> list(NoDists()('anything'))
    []
    """
    def __bool__(self):
        # Falsy, so dist_factory results can double as predicates.
        return False
    if six.PY2:
        # Python 2 spells the truthiness hook __nonzero__.
        __nonzero__ = __bool__

    def __call__(self, fullpath):
        # Behave like a finder that yields no distributions.
        return iter(())
2117
2118
def safe_listdir(path):
    """
    Attempt to list contents of path, but suppress some exceptions.

    Missing paths, non-directories, and permission failures all produce
    an empty tuple; any other OSError propagates.
    """
    try:
        return os.listdir(path)
    except (PermissionError, NotADirectoryError):
        return ()
    except OSError as exc:
        # Ignore the directory if does not exist, not a directory or
        # permission denied
        benign = exc.errno in (errno.ENOTDIR, errno.EACCES, errno.ENOENT)
        # Python 2 on Windows needs to be handled this way :(
        if benign or getattr(exc, "winerror", None) == 267:
            return ()
        raise
2138
2139
def distributions_from_metadata(path):
    """Yield a develop-precedence Distribution for the metadata at `path`."""
    root = os.path.dirname(path)
    if os.path.isdir(path):
        if not os.listdir(path):
            # empty metadata dir; skip
            return
        metadata = PathMetadata(root, path)
    else:
        metadata = FileMetadata(path)
    entry = os.path.basename(path)
    yield Distribution.from_location(
        root, entry, metadata, precedence=DEVELOP_DIST,
    )
2153
2154
def non_empty_lines(path):
    """
    Yield non-empty lines from file at path
    """
    with open(path) as f:
        for raw_line in f:
            stripped = raw_line.strip()
            if stripped:
                yield stripped
2164
2165
def resolve_egg_link(path):
    """
    Given a path to an .egg-link, resolve distributions
    present in the referenced path.
    """
    base = os.path.dirname(path)
    # Each referenced path is interpreted relative to the link's directory.
    resolved = (
        os.path.join(base, ref)
        for ref in non_empty_lines(path)
    )
    # Only the first referenced path contributes distributions.
    return next(map(find_distributions, resolved), ())
2178
2179
# Ordinary filesystem sys.path entries are scanned with find_on_path.
register_finder(pkgutil.ImpImporter, find_on_path)

if hasattr(importlib_machinery, 'FileFinder'):
    register_finder(importlib_machinery.FileFinder, find_on_path)

# Registries for namespace-package handlers and declared namespace packages;
# declared via _declare_state so they participate in state save/restore.
_declare_state('dict', _namespace_handlers={})
_declare_state('dict', _namespace_packages={})
2187
2188
def register_namespace_handler(importer_type, namespace_handler):
    """Register `namespace_handler` to declare namespace packages

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `namespace_handler` is a callable like this::

        def namespace_handler(importer, path_entry, moduleName, module):
            # return a path_entry to use for child packages

    Namespace handlers are only called if the importer object has already
    agreed that it can handle the relevant path item, and they should only
    return a subpath if the module __path__ does not already contain an
    equivalent subpath.  For an example namespace handler, see
    ``pkg_resources.file_ns_handler``.
    """
    # Lookups resolve via _find_adapter, which honors subclass relationships.
    _namespace_handlers[importer_type] = namespace_handler
2205
2206
def _handle_ns(packageName, path_item):
    """Ensure that named package includes a subpath of path_item (if needed)"""

    importer = get_importer(path_item)
    if importer is None:
        # No importer handles this path item; nothing to contribute.
        return None

    # use find_spec (PEP 451) and fall-back to find_module (PEP 302)
    try:
        loader = importer.find_spec(packageName).loader
    except AttributeError:
        # capture warnings due to #1111
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            loader = importer.find_module(packageName)

    if loader is None:
        return None
    module = sys.modules.get(packageName)
    if module is None:
        # Create the namespace package's module lazily and attach it to
        # its parent package's namespace.
        module = sys.modules[packageName] = types.ModuleType(packageName)
        module.__path__ = []
        _set_parent_ns(packageName)
    elif not hasattr(module, '__path__'):
        raise TypeError("Not a package:", packageName)
    # Let the handler registered for this importer type pick the subpath.
    handler = _find_adapter(_namespace_handlers, importer)
    subpath = handler(importer, path_item, packageName, module)
    if subpath is not None:
        path = module.__path__
        path.append(subpath)
        loader.load_module(packageName)
        # Keep __path__ ordered consistently with sys.path.
        _rebuild_mod_path(path, packageName, module)
    return subpath
2240
2241
def _rebuild_mod_path(orig_path, package_name, module):
    """
    Rebuild module.__path__ ensuring that all entries are ordered
    corresponding to their sys.path order
    """
    normalized_sys_path = [_normalize_cached(p) for p in sys.path]

    def safe_sys_path_index(entry):
        """
        Workaround for #520 and #513.
        """
        try:
            return normalized_sys_path.index(entry)
        except ValueError:
            # Entries not on sys.path sort after everything else.
            return float('inf')

    def position_in_sys_path(path):
        """
        Return the ordinal of the path based on its position in sys.path
        """
        parts = path.split(os.sep)
        # Drop the package's own directory components to recover the
        # sys.path entry this __path__ item came from.
        depth = package_name.count('.') + 1
        parent = os.sep.join(parts[:-depth])
        return safe_sys_path_index(_normalize_cached(parent))

    reordered = [
        _normalize_cached(p)
        for p in sorted(orig_path, key=position_in_sys_path)
    ]

    if isinstance(module.__path__, list):
        # Mutate in place so aliases of __path__ observe the new order.
        module.__path__[:] = reordered
    else:
        module.__path__ = reordered
2274
2275
def declare_namespace(packageName):
    """Declare that package 'packageName' is a namespace package"""

    # Serialize with the import machinery: this mutates sys.modules and
    # the shared _namespace_packages registry.
    _imp.acquire_lock()
    try:
        if packageName in _namespace_packages:
            # Already declared; nothing to do.
            return

        path = sys.path
        parent, _, _ = packageName.rpartition('.')

        if parent:
            # Declare (and import) every ancestor first; child path items
            # are then searched relative to the parent's __path__.
            declare_namespace(parent)
            if parent not in _namespace_packages:
                __import__(parent)
            try:
                path = sys.modules[parent].__path__
            except AttributeError as e:
                raise TypeError("Not a package:", parent) from e

        # Track what packages are namespaces, so when new path items are added,
        # they can be updated
        _namespace_packages.setdefault(parent or None, []).append(packageName)
        _namespace_packages.setdefault(packageName, [])

        for path_item in path:
            # Ensure all the parent's path items are reflected in the child,
            # if they apply
            _handle_ns(packageName, path_item)

    finally:
        _imp.release_lock()
2308
2309
def fixup_namespace_packages(path_item, parent=None):
    """Ensure that previously-declared namespace packages include path_item"""
    _imp.acquire_lock()
    try:
        declared = _namespace_packages.get(parent, ())
        for package in declared:
            subpath = _handle_ns(package, path_item)
            if not subpath:
                continue
            # Recurse so nested namespace packages see the new entry too.
            fixup_namespace_packages(subpath, package)
    finally:
        _imp.release_lock()
2320
2321
def file_ns_handler(importer, path_item, packageName, module):
    """Compute an ns-package subpath for a filesystem or zipfile importer"""
    subpath = os.path.join(path_item, packageName.split('.')[-1])
    normalized = _normalize_cached(subpath)
    # Only return the path if an equivalent entry isn't already present.
    already_present = any(
        _normalize_cached(item) == normalized
        for item in module.__path__
    )
    if not already_present:
        return subpath
2333
2334
# Filesystem and zip importers share the same namespace-subpath logic.
register_namespace_handler(pkgutil.ImpImporter, file_ns_handler)
register_namespace_handler(zipimport.zipimporter, file_ns_handler)

if hasattr(importlib_machinery, 'FileFinder'):
    register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler)
2340
2341
def null_ns_handler(importer, path_item, packageName, module):
    """Namespace handler that contributes no subpath."""
    return None
2344
2345
2346register_namespace_handler(object, null_ns_handler)
2347
2348
def normalize_path(filename):
    """Normalize a file/dir name for comparison purposes"""
    patched = _cygwin_patch(filename)
    return os.path.normcase(os.path.realpath(os.path.normpath(patched)))
2353
2354
2355def _cygwin_patch(filename):  # pragma: nocover
2356    """
2357    Contrary to POSIX 2008, on Cygwin, getcwd (3) contains
2358    symlink components. Using
2359    os.path.abspath() works around this limitation. A fix in os.getcwd()
2360    would probably better, in Cygwin even more so, except
2361    that this seems to be by design...
2362    """
2363    return os.path.abspath(filename) if sys.platform == 'cygwin' else filename
2364
2365
def _normalize_cached(filename, _cache={}):
    """Memoized normalize_path.

    NOTE: the mutable default argument is intentional -- it acts as a
    process-wide cache shared across all calls.
    """
    if filename not in _cache:
        _cache[filename] = normalize_path(filename)
    return _cache[filename]
2372
2373
def _is_egg_path(path):
    """
    Determine if given path appears to be an egg.

    True for both zipped (.egg file) and unpacked (.egg directory) forms.
    """
    return _is_zip_egg(path) or _is_unpacked_egg(path)
2379
2380
2381def _is_zip_egg(path):
2382    return (
2383        path.lower().endswith('.egg') and
2384        os.path.isfile(path) and
2385        zipfile.is_zipfile(path)
2386    )
2387
2388
2389def _is_unpacked_egg(path):
2390    """
2391    Determine if given path appears to be an unpacked egg.
2392    """
2393    return (
2394        path.lower().endswith('.egg') and
2395        os.path.isfile(os.path.join(path, 'EGG-INFO', 'PKG-INFO'))
2396    )
2397
2398
2399def _set_parent_ns(packageName):
2400    parts = packageName.split('.')
2401    name = parts.pop()
2402    if parts:
2403        parent = '.'.join(parts)
2404        setattr(sys.modules[parent], name, sys.modules[packageName])
2405
2406
def yield_lines(strs):
    """Yield non-empty/non-comment lines of a string or sequence"""
    if isinstance(strs, six.string_types):
        for candidate in strs.splitlines():
            candidate = candidate.strip()
            # skip blank lines/comments
            if candidate and not candidate.startswith('#'):
                yield candidate
    else:
        # A sequence: flatten by recursing into each element.
        for item in strs:
            for line in yield_lines(item):
                yield line
2419
2420
# Matcher for a dotted module or entry-point group name, e.g. "pkg.mod".
MODULE = re.compile(r"\w+(\.\w+)*$").match
# Matcher that decomposes egg basenames of the form
# name[-version[-pyX.Y[-platform]]] into named groups.
EGG_NAME = re.compile(
    r"""
    (?P<name>[^-]+) (
        -(?P<ver>[^-]+) (
            -py(?P<pyver>[^-]+) (
                -(?P<plat>.+)
            )?
        )?
    )?
    """,
    re.VERBOSE | re.IGNORECASE,
).match
2434
2435
class EntryPoint:
    """Object representing an advertised importable object"""

    def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
        if not MODULE(module_name):
            raise ValueError("Invalid module name", module_name)
        self.name = name
        # Dotted module path, validated against the MODULE regex above.
        self.module_name = module_name
        # Attribute chain resolved on the module (e.g. ('Class', 'method')).
        self.attrs = tuple(attrs)
        # Extras that must be required before the entry point is loaded.
        self.extras = tuple(extras)
        # Owning Distribution, if any.
        self.dist = dist

    def __str__(self):
        # Render back into "name = module:attrs [extras]" syntax.
        s = "%s = %s" % (self.name, self.module_name)
        if self.attrs:
            s += ':' + '.'.join(self.attrs)
        if self.extras:
            s += ' [%s]' % ','.join(self.extras)
        return s

    def __repr__(self):
        return "EntryPoint.parse(%r)" % str(self)

    def load(self, require=True, *args, **kwargs):
        """
        Require packages for this EntryPoint, then resolve it.
        """
        if not require or args or kwargs:
            # Any legacy positional/keyword arguments trigger the
            # deprecation path in favor of require()/resolve().
            warnings.warn(
                "Parameters to load are deprecated.  Call .resolve and "
                ".require separately.",
                PkgResourcesDeprecationWarning,
                stacklevel=2,
            )
        if require:
            self.require(*args, **kwargs)
        return self.resolve()

    def resolve(self):
        """
        Resolve the entry point from its module and attrs.
        """
        module = __import__(self.module_name, fromlist=['__name__'], level=0)
        try:
            # Walk the attribute chain, e.g. module.Class.method.
            return functools.reduce(getattr, self.attrs, module)
        except AttributeError as exc:
            raise ImportError(str(exc)) from exc

    def require(self, env=None, installer=None):
        """Resolve this entry point's requirements into the working set.

        NOTE(review): presumably self.dist is expected to be set whenever
        require() is called -- a dist-less entry point without extras would
        raise AttributeError below; confirm intended behavior.
        """
        if self.extras and not self.dist:
            raise UnknownExtra("Can't require() without a distribution", self)

        # Get the requirements for this entry point with all its extras and
        # then resolve them. We have to pass `extras` along when resolving so
        # that the working set knows what extras we want. Otherwise, for
        # dist-info distributions, the working set will assume that the
        # requirements for that extra are purely optional and skip over them.
        reqs = self.dist.requires(self.extras)
        items = working_set.resolve(reqs, env, installer, extras=self.extras)
        list(map(working_set.add, items))

    # Grammar for one entry-point line: name = module[:attrs] [extras]
    pattern = re.compile(
        r'\s*'
        r'(?P<name>.+?)\s*'
        r'=\s*'
        r'(?P<module>[\w.]+)\s*'
        r'(:\s*(?P<attr>[\w.]+))?\s*'
        r'(?P<extras>\[.*\])?\s*$'
    )

    @classmethod
    def parse(cls, src, dist=None):
        """Parse a single entry point from string `src`

        Entry point syntax follows the form::

            name = some.module:some.attr [extra1, extra2]

        The entry name and module name are required, but the ``:attrs`` and
        ``[extras]`` parts are optional
        """
        m = cls.pattern.match(src)
        if not m:
            msg = "EntryPoint must be in 'name=module:attrs [extras]' format"
            raise ValueError(msg, src)
        res = m.groupdict()
        extras = cls._parse_extras(res['extras'])
        attrs = res['attr'].split('.') if res['attr'] else ()
        return cls(res['name'], res['module'], attrs, extras, dist)

    @classmethod
    def _parse_extras(cls, extras_spec):
        # Reuse Requirement's bracket parsing by prefixing a dummy project
        # name; version specifiers are not allowed in an extras spec.
        if not extras_spec:
            return ()
        req = Requirement.parse('x' + extras_spec)
        if req.specs:
            raise ValueError()
        return req.extras

    @classmethod
    def parse_group(cls, group, lines, dist=None):
        """Parse an entry point group"""
        if not MODULE(group):
            raise ValueError("Invalid group name", group)
        this = {}
        for line in yield_lines(lines):
            ep = cls.parse(line, dist)
            if ep.name in this:
                raise ValueError("Duplicate entry point", group, ep.name)
            this[ep.name] = ep
        return this

    @classmethod
    def parse_map(cls, data, dist=None):
        """Parse a map of entry point groups"""
        if isinstance(data, dict):
            data = data.items()
        else:
            # Strings/sequences are split into (section, lines) pairs.
            data = split_sections(data)
        maps = {}
        for group, lines in data:
            if group is None:
                # Lines outside any [section] header are only legal if empty.
                if not lines:
                    continue
                raise ValueError("Entry points must be listed in groups")
            group = group.strip()
            if group in maps:
                raise ValueError("Duplicate group name", group)
            maps[group] = cls.parse_group(group, lines, dist)
        return maps
2566
2567
def _version_from_file(lines):
    """
    Given an iterable of lines from a Metadata file, return
    the value of the Version field, if present, or None otherwise.
    """
    matches = (
        line for line in lines
        if line.lower().startswith('version:')
    )
    first = next(matches, '')
    _, _, version_str = first.partition(':')
    return safe_version(version_str.strip()) or None
2579
2580
2581class Distribution:
2582    """Wrap an actual or potential sys.path entry w/metadata"""
2583    PKG_INFO = 'PKG-INFO'
2584
2585    def __init__(
2586            self, location=None, metadata=None, project_name=None,
2587            version=None, py_version=PY_MAJOR, platform=None,
2588            precedence=EGG_DIST):
2589        self.project_name = safe_name(project_name or 'Unknown')
2590        if version is not None:
2591            self._version = safe_version(version)
2592        self.py_version = py_version
2593        self.platform = platform
2594        self.location = location
2595        self.precedence = precedence
2596        self._provider = metadata or empty_provider
2597
2598    @classmethod
2599    def from_location(cls, location, basename, metadata=None, **kw):
2600        project_name, version, py_version, platform = [None] * 4
2601        basename, ext = os.path.splitext(basename)
2602        if ext.lower() in _distributionImpl:
2603            cls = _distributionImpl[ext.lower()]
2604
2605            match = EGG_NAME(basename)
2606            if match:
2607                project_name, version, py_version, platform = match.group(
2608                    'name', 'ver', 'pyver', 'plat'
2609                )
2610        return cls(
2611            location, metadata, project_name=project_name, version=version,
2612            py_version=py_version, platform=platform, **kw
2613        )._reload_version()
2614
2615    def _reload_version(self):
2616        return self
2617
2618    @property
2619    def hashcmp(self):
2620        return (
2621            self.parsed_version,
2622            self.precedence,
2623            self.key,
2624            self.location,
2625            self.py_version or '',
2626            self.platform or '',
2627        )
2628
2629    def __hash__(self):
2630        return hash(self.hashcmp)
2631
2632    def __lt__(self, other):
2633        return self.hashcmp < other.hashcmp
2634
2635    def __le__(self, other):
2636        return self.hashcmp <= other.hashcmp
2637
2638    def __gt__(self, other):
2639        return self.hashcmp > other.hashcmp
2640
    def __ge__(self, other):
        # Ordering delegates entirely to the hashcmp tuple.
        return self.hashcmp >= other.hashcmp
2643
2644    def __eq__(self, other):
2645        if not isinstance(other, self.__class__):
2646            # It's not a Distribution, so they are not equal
2647            return False
2648        return self.hashcmp == other.hashcmp
2649
    def __ne__(self, other):
        # Explicit inverse of __eq__ (required on Python 2; harmless on 3).
        return not self == other
2652
2653    # These properties have to be lazy so that we don't have to load any
2654    # metadata until/unless it's actually needed.  (i.e., some distributions
2655    # may not know their name or version without loading PKG-INFO)
2656
2657    @property
2658    def key(self):
2659        try:
2660            return self._key
2661        except AttributeError:
2662            self._key = key = self.project_name.lower()
2663            return key
2664
2665    @property
2666    def parsed_version(self):
2667        if not hasattr(self, "_parsed_version"):
2668            self._parsed_version = parse_version(self.version)
2669
2670        return self._parsed_version
2671
2672    def _warn_legacy_version(self):
2673        LV = packaging.version.LegacyVersion
2674        is_legacy = isinstance(self._parsed_version, LV)
2675        if not is_legacy:
2676            return
2677
2678        # While an empty version is technically a legacy version and
2679        # is not a valid PEP 440 version, it's also unlikely to
2680        # actually come from someone and instead it is more likely that
2681        # it comes from setuptools attempting to parse a filename and
2682        # including it in the list. So for that we'll gate this warning
2683        # on if the version is anything at all or not.
2684        if not self.version:
2685            return
2686
2687        tmpl = textwrap.dedent("""
2688            '{project_name} ({version})' is being parsed as a legacy,
2689            non PEP 440,
2690            version. You may find odd behavior and sort order.
2691            In particular it will be sorted as less than 0.0. It
2692            is recommended to migrate to PEP 440 compatible
2693            versions.
2694            """).strip().replace('\n', ' ')
2695
2696        warnings.warn(tmpl.format(**vars(self)), PEP440Warning)
2697
2698    @property
2699    def version(self):
2700        try:
2701            return self._version
2702        except AttributeError as e:
2703            version = self._get_version()
2704            if version is None:
2705                path = self._get_metadata_path_for_display(self.PKG_INFO)
2706                msg = (
2707                    "Missing 'Version:' header and/or {} file at path: {}"
2708                ).format(self.PKG_INFO, path)
2709                raise ValueError(msg, self) from e
2710
2711            return version
2712
2713    @property
2714    def _dep_map(self):
2715        """
2716        A map of extra to its list of (direct) requirements
2717        for this distribution, including the null extra.
2718        """
2719        try:
2720            return self.__dep_map
2721        except AttributeError:
2722            self.__dep_map = self._filter_extras(self._build_dep_map())
2723        return self.__dep_map
2724
2725    @staticmethod
2726    def _filter_extras(dm):
2727        """
2728        Given a mapping of extras to dependencies, strip off
2729        environment markers and filter out any dependencies
2730        not matching the markers.
2731        """
2732        for extra in list(filter(None, dm)):
2733            new_extra = extra
2734            reqs = dm.pop(extra)
2735            new_extra, _, marker = extra.partition(':')
2736            fails_marker = marker and (
2737                invalid_marker(marker)
2738                or not evaluate_marker(marker)
2739            )
2740            if fails_marker:
2741                reqs = []
2742            new_extra = safe_extra(new_extra) or None
2743
2744            dm.setdefault(new_extra, []).extend(reqs)
2745        return dm
2746
2747    def _build_dep_map(self):
2748        dm = {}
2749        for name in 'requires.txt', 'depends.txt':
2750            for extra, reqs in split_sections(self._get_metadata(name)):
2751                dm.setdefault(extra, []).extend(parse_requirements(reqs))
2752        return dm
2753
2754    def requires(self, extras=()):
2755        """List of Requirements needed for this distro if `extras` are used"""
2756        dm = self._dep_map
2757        deps = []
2758        deps.extend(dm.get(None, ()))
2759        for ext in extras:
2760            try:
2761                deps.extend(dm[safe_extra(ext)])
2762            except KeyError as e:
2763                raise UnknownExtra(
2764                    "%s has no such extra feature %r" % (self, ext)
2765                ) from e
2766        return deps
2767
2768    def _get_metadata_path_for_display(self, name):
2769        """
2770        Return the path to the given metadata file, if available.
2771        """
2772        try:
2773            # We need to access _get_metadata_path() on the provider object
2774            # directly rather than through this class's __getattr__()
2775            # since _get_metadata_path() is marked private.
2776            path = self._provider._get_metadata_path(name)
2777
2778        # Handle exceptions e.g. in case the distribution's metadata
2779        # provider doesn't support _get_metadata_path().
2780        except Exception:
2781            return '[could not detect]'
2782
2783        return path
2784
2785    def _get_metadata(self, name):
2786        if self.has_metadata(name):
2787            for line in self.get_metadata_lines(name):
2788                yield line
2789
2790    def _get_version(self):
2791        lines = self._get_metadata(self.PKG_INFO)
2792        version = _version_from_file(lines)
2793
2794        return version
2795
    def activate(self, path=None, replace=False):
        """Ensure distribution is importable on `path` (default=sys.path)"""
        if path is None:
            path = sys.path
        self.insert_on(path, replace=replace)
        if path is sys.path:
            # Only touch global import machinery when modifying the real
            # sys.path: repair namespace packages and re-declare any this
            # distribution provides that are already imported.
            fixup_namespace_packages(self.location)
            for pkg in self._get_metadata('namespace_packages.txt'):
                if pkg in sys.modules:
                    declare_namespace(pkg)
2806
2807    def egg_name(self):
2808        """Return what this distribution's standard .egg filename should be"""
2809        filename = "%s-%s-py%s" % (
2810            to_filename(self.project_name), to_filename(self.version),
2811            self.py_version or PY_MAJOR
2812        )
2813
2814        if self.platform:
2815            filename += '-' + self.platform
2816        return filename
2817
2818    def __repr__(self):
2819        if self.location:
2820            return "%s (%s)" % (self, self.location)
2821        else:
2822            return str(self)
2823
2824    def __str__(self):
2825        try:
2826            version = getattr(self, 'version', None)
2827        except ValueError:
2828            version = None
2829        version = version or "[unknown version]"
2830        return "%s %s" % (self.project_name, version)
2831
2832    def __getattr__(self, attr):
2833        """Delegate all unrecognized public attributes to .metadata provider"""
2834        if attr.startswith('_'):
2835            raise AttributeError(attr)
2836        return getattr(self._provider, attr)
2837
    def __dir__(self):
        # Mirror __getattr__'s delegation: combine this object's own
        # attributes with the provider's public ones so dir() matches
        # what is actually reachable.
        return list(
            set(super(Distribution, self).__dir__())
            | set(
                attr for attr in self._provider.__dir__()
                if not attr.startswith('_')
            )
        )

    if not hasattr(object, '__dir__'):
        # object.__dir__ is absent on Python 2; fall back to default dir()
        del __dir__
2850
2851    @classmethod
2852    def from_filename(cls, filename, metadata=None, **kw):
2853        return cls.from_location(
2854            _normalize_cached(filename), os.path.basename(filename), metadata,
2855            **kw
2856        )
2857
2858    def as_requirement(self):
2859        """Return a ``Requirement`` that matches this distribution exactly"""
2860        if isinstance(self.parsed_version, packaging.version.Version):
2861            spec = "%s==%s" % (self.project_name, self.parsed_version)
2862        else:
2863            spec = "%s===%s" % (self.project_name, self.parsed_version)
2864
2865        return Requirement.parse(spec)
2866
2867    def load_entry_point(self, group, name):
2868        """Return the `name` entry point of `group` or raise ImportError"""
2869        ep = self.get_entry_info(group, name)
2870        if ep is None:
2871            raise ImportError("Entry point %r not found" % ((group, name),))
2872        return ep.load()
2873
2874    def get_entry_map(self, group=None):
2875        """Return the entry point map for `group`, or the full entry map"""
2876        try:
2877            ep_map = self._ep_map
2878        except AttributeError:
2879            ep_map = self._ep_map = EntryPoint.parse_map(
2880                self._get_metadata('entry_points.txt'), self
2881            )
2882        if group is not None:
2883            return ep_map.get(group, {})
2884        return ep_map
2885
2886    def get_entry_info(self, group, name):
2887        """Return the EntryPoint object for `group`+`name`, or ``None``"""
2888        return self.get_entry_map(group).get(name)
2889
    def insert_on(self, path, loc=None, replace=False):
        """Ensure self.location is on path

        If replace=False (default):
            - If location is already in path anywhere, do nothing.
            - Else:
              - If it's an egg and its parent directory is on path,
                insert just ahead of the parent.
              - Else: add to the end of path.
        If replace=True:
            - If location is already on path anywhere (not eggs)
              or higher priority than its parent (eggs)
              do nothing.
            - Else:
              - If it's an egg and its parent directory is on path,
                insert just ahead of the parent,
                removing any lower-priority entries.
              - Else: add it to the front of path.
        """

        loc = loc or self.location
        if not loc:
            return

        nloc = _normalize_cached(loc)
        bdir = os.path.dirname(nloc)
        # Normalized shadow copy of `path`, kept in lockstep with `path`
        # below so comparisons are case/sep-insensitive while `path`
        # itself keeps the caller's original spellings.
        npath = [(p and _normalize_cached(p) or p) for p in path]

        for p, item in enumerate(npath):
            if item == nloc:
                if replace:
                    break
                else:
                    # don't modify path (even removing duplicates) if
                    # found and not replace
                    return
            elif item == bdir and self.precedence == EGG_DIST:
                # if it's an .egg, give it precedence over its directory
                # UNLESS it's already been added to sys.path and replace=False
                if (not replace) and nloc in npath[p:]:
                    return
                if path is sys.path:
                    self.check_version_conflict()
                # insert into both lists to keep them in lockstep
                path.insert(p, loc)
                npath.insert(p, nloc)
                break
        else:
            # location not on path at all: append (replace=False) or
            # prepend (replace=True)
            if path is sys.path:
                self.check_version_conflict()
            if replace:
                path.insert(0, loc)
            else:
                path.append(loc)
            return

        # p is the spot where we found or inserted loc; now remove duplicates
        while True:
            try:
                np = npath.index(nloc, p + 1)
            except ValueError:
                break
            else:
                del npath[np], path[np]
                # ha!
                p = np

        return
2957
    def check_version_conflict(self):
        """Warn when a top-level module of this distribution was already
        imported from a different location than the one being activated."""
        if self.key == 'setuptools':
            # ignore the inevitable setuptools self-conflicts  :(
            return

        nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
        loc = normalize_path(self.location)
        for modname in self._get_metadata('top_level.txt'):
            # Skip modules not yet imported and namespace packages
            # (which legitimately span multiple locations).
            if (modname not in sys.modules or modname in nsp
                    or modname in _namespace_packages):
                continue
            if modname in ('pkg_resources', 'setuptools', 'site'):
                continue
            fn = getattr(sys.modules[modname], '__file__', None)
            if fn and (normalize_path(fn).startswith(loc) or
                       fn.startswith(self.location)):
                # already imported from within this distribution: fine
                continue
            issue_warning(
                "Module %s was already imported from %s, but %s is being added"
                " to sys.path" % (modname, fn, self.location),
            )
2979
2980    def has_version(self):
2981        try:
2982            self.version
2983        except ValueError:
2984            issue_warning("Unbuilt egg for " + repr(self))
2985            return False
2986        return True
2987
2988    def clone(self, **kw):
2989        """Copy this distribution, substituting in any changed keyword args"""
2990        names = 'project_name version py_version platform location precedence'
2991        for attr in names.split():
2992            kw.setdefault(attr, getattr(self, attr, None))
2993        kw.setdefault('metadata', self._provider)
2994        return self.__class__(**kw)
2995
2996    @property
2997    def extras(self):
2998        return [dep for dep in self._dep_map if dep]
2999
3000
class EggInfoDistribution(Distribution):
    def _reload_version(self):
        """
        Prefer the version recorded in the metadata file over the one
        mined from the filename.

        Packages installed by distutils (e.g. numpy or scipy) use an old
        safe_version, so their version numbers can get mangled when
        converted to filenames (e.g. 1.11.0.dev0+2329eae becomes
        1.11.0.dev0_2329eae) and would then be mis-parsed downstream by
        Distribution and safe_version.  Reading PKG-INFO sidesteps that.
        """
        version_from_metadata = self._get_version()
        if version_from_metadata:
            self._version = version_from_metadata
        return self
3018
3019
class DistInfoDistribution(Distribution):
    """
    Wrap an actual or potential sys.path entry
    w/metadata, .dist-info style.
    """
    # dist-info metadata lives in an email-style METADATA file rather
    # than PKG-INFO.
    PKG_INFO = 'METADATA'
    # NOTE(review): apparently matches a version between parens/commas,
    # e.g. "(>=1.0,)"; no uses are visible in this chunk — confirm before
    # relying on it.
    EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])")

    @property
    def _parsed_pkg_info(self):
        """Parse and cache metadata"""
        try:
            return self._pkg_info
        except AttributeError:
            # METADATA is RFC 822-style; parse it once with email.parser.
            metadata = self.get_metadata(self.PKG_INFO)
            self._pkg_info = email.parser.Parser().parsestr(metadata)
            return self._pkg_info

    @property
    def _dep_map(self):
        # Overrides Distribution._dep_map: dist-info dependencies come
        # from Requires-Dist headers, not requires.txt sections.
        try:
            return self.__dep_map
        except AttributeError:
            self.__dep_map = self._compute_dependencies()
            return self.__dep_map

    def _compute_dependencies(self):
        """Recompute this distribution's dependencies."""
        dm = self.__dep_map = {None: []}

        reqs = []
        # Including any condition expressions
        for req in self._parsed_pkg_info.get_all('Requires-Dist') or []:
            reqs.extend(parse_requirements(req))

        def reqs_for_extra(extra):
            # Requirements whose marker is absent or satisfied when
            # evaluated with the given 'extra'.
            for req in reqs:
                if not req.marker or req.marker.evaluate({'extra': extra}):
                    yield req

        common = frozenset(reqs_for_extra(None))
        dm[None].extend(common)

        for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:
            s_extra = safe_extra(extra.strip())
            # per-extra requirements exclude the unconditional ones
            dm[s_extra] = list(frozenset(reqs_for_extra(extra)) - common)

        return dm
3068
3069
# Map a metadata file/directory extension to the Distribution subclass
# that knows how to read it (used by Distribution.from_location).
_distributionImpl = {
    '.egg': Distribution,
    '.egg-info': EggInfoDistribution,
    '.dist-info': DistInfoDistribution,
}
3075
3076
def issue_warning(*args, **kw):
    """Warn with a stacklevel pointing at the first caller outside this
    module, so the warning is attributed to user code."""
    g = globals()
    level = 1
    try:
        # Walk up past every frame whose globals are this module's.
        while sys._getframe(level).f_globals is g:
            level += 1
    except ValueError:
        # Ran off the top of the stack; warn from the deepest frame found.
        pass
    warnings.warn(stacklevel=level + 1, *args, **kw)
3088
3089
def parse_requirements(strs):
    """Yield ``Requirement`` objects for each specification in `strs`

    `strs` must be a string, or a (possibly-nested) iterable thereof.
    """
    # create a steppable iterator, so we can handle \-continuations
    lines = iter(yield_lines(strs))

    for line in lines:
        # Drop comments -- a hash without a space may be in a URL.
        if ' #' in line:
            line = line[:line.find(' #')]
        # If there is a line continuation, drop it, and append the next line.
        if line.endswith('\\'):
            # Strip only the trailing backslash; the previous [:-2] slice
            # also discarded the last significant character unless a
            # space happened to precede the backslash.
            line = line[:-1].strip()
            try:
                line += next(lines)
            except StopIteration:
                return
        yield Requirement(line)
3110
3111
class RequirementParseError(packaging.requirements.InvalidRequirement):
    "Compatibility wrapper for InvalidRequirement"
3114
3115
class Requirement(packaging.requirements.Requirement):
    def __init__(self, requirement_string):
        """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
        super(Requirement, self).__init__(requirement_string)
        # Layer the historical pkg_resources attributes on top of the
        # packaging.requirements.Requirement base attributes.
        self.unsafe_name = self.name
        project_name = safe_name(self.name)
        self.project_name, self.key = project_name, project_name.lower()
        self.specs = [
            (spec.operator, spec.version) for spec in self.specifier]
        self.extras = tuple(map(safe_extra, self.extras))
        # Identity key used by __eq__/__hash__: two requirements are
        # interchangeable when all of these components match.
        self.hashCmp = (
            self.key,
            self.url,
            self.specifier,
            frozenset(self.extras),
            str(self.marker) if self.marker else None,
        )
        self.__hash = hash(self.hashCmp)

    def __eq__(self, other):
        return (
            isinstance(other, Requirement) and
            self.hashCmp == other.hashCmp
        )

    def __ne__(self, other):
        return not self == other

    def __contains__(self, item):
        # A Distribution is "in" the requirement when its project matches
        # and its version satisfies the specifier.
        if isinstance(item, Distribution):
            if item.key != self.key:
                return False

            item = item.version

        # Allow prereleases always in order to match the previous behavior of
        # this method. In the future this should be smarter and follow PEP 440
        # more accurately.
        return self.specifier.contains(item, prereleases=True)

    def __hash__(self):
        # hash precomputed in __init__
        return self.__hash

    def __repr__(self):
        return "Requirement.parse(%r)" % str(self)

    @staticmethod
    def parse(s):
        # `s` must contain exactly one requirement specification
        req, = parse_requirements(s)
        return req
3166
3167
3168def _always_object(classes):
3169    """
3170    Ensure object appears in the mro even
3171    for old-style classes.
3172    """
3173    if object not in classes:
3174        return classes + (object,)
3175    return classes
3176
3177
def _find_adapter(registry, ob):
    """Return an adapter factory for `ob` from `registry`"""
    # Search the object's MRO (most-derived first); None when no class
    # in the chain is registered.
    mro = inspect.getmro(getattr(ob, '__class__', type(ob)))
    return next(
        (registry[t] for t in _always_object(mro) if t in registry),
        None,
    )
3184
3185
def ensure_directory(path):
    """Ensure that the parent directory of `path` exists"""
    parent = os.path.dirname(path)
    os.makedirs(parent, exist_ok=True)
3190
3191
def _bypass_ensure_directory(path):
    """Sandbox-bypassing version of ensure_directory()"""
    if not WRITE_SUPPORT:
        raise IOError('"os.mkdir" not supported on this platform.')
    # Recursively create missing parents using mkdir/isdir/split captured
    # at import time, bypassing any sandbox patching of the os module.
    dirname, filename = split(path)
    if dirname and filename and not isdir(dirname):
        _bypass_ensure_directory(dirname)
        try:
            mkdir(dirname, 0o755)
        except FileExistsError:
            # another thread/process created it first; that's fine
            pass
3203
3204
def split_sections(s):
    """Split a string or iterable thereof into (section, content) pairs

    Each ``section`` is a stripped version of the section header ("[section]")
    and each ``content`` is a list of stripped lines excluding blank lines and
    comment-only lines.  If there are any such lines before the first section
    header, they're returned in a first ``section`` of ``None``.
    """
    section, content = None, []
    for line in yield_lines(s):
        if not line.startswith("["):
            content.append(line)
            continue
        if not line.endswith("]"):
            raise ValueError("Invalid section heading", line)
        # Flush the previous section before starting the new one.
        if section or content:
            yield section, content
        section = line[1:-1].strip()
        content = []

    # wrap up last segment
    yield section, content
3229
3230
def _mkstemp(*args, **kw):
    """tempfile.mkstemp() that works under the install sandbox by
    temporarily restoring the real os.open captured at import time."""
    old_open = os.open
    try:
        # temporarily bypass sandboxing
        os.open = os_open
        return tempfile.mkstemp(*args, **kw)
    finally:
        # and then put it back
        os.open = old_open
3240
3241
# Silence the PEP440Warning by default, so that end users don't get hit by it
# randomly just because they use pkg_resources. We want to append the rule
# because we want earlier uses of filterwarnings to take precedence over this
# one. (Applications can re-enable it with
# warnings.filterwarnings("default", category=PEP440Warning).)
warnings.filterwarnings("ignore", category=PEP440Warning, append=True)
3247
3248
# from jaraco.functools 1.3
def _call_aside(f, *args, **kwargs):
    # Call `f` immediately and return it unchanged; used below as a
    # decorator to run module-initialization functions at import time.
    f(*args, **kwargs)
    return f
3253
3254
@_call_aside
def _initialize(g=globals()):
    "Set up global resource manager (deliberately not state-saved)"
    # Runs once at import time (via @_call_aside): create the singleton
    # ResourceManager and re-export each of its public attributes as a
    # module-level name.
    manager = ResourceManager()
    g['_manager'] = manager
    g.update(
        (name, getattr(manager, name))
        for name in dir(manager)
        if not name.startswith('_')
    )
3265
3266
@_call_aside
def _initialize_master_working_set():
    """
    Prepare the master working set and make the ``require()``
    API available.

    This function has explicit effects on the global state
    of pkg_resources. It is intended to be invoked once at
    the initialization of this module.

    Invocation by other packages is unsupported and done
    at their own risk.
    """
    working_set = WorkingSet._build_master()
    _declare_state('object', working_set=working_set)

    # Bound methods exported as module-level API names (see the
    # globals().update(locals()) at the end).
    require = working_set.require
    iter_entry_points = working_set.iter_entry_points
    add_activation_listener = working_set.subscribe
    run_script = working_set.run_script
    # backward compatibility
    run_main = run_script
    # Activate all distributions already on sys.path with replace=False and
    # ensure that all distributions added to the working set in the future
    # (e.g. by calling ``require()``) will get activated as well,
    # with higher priority (replace=True).
    # tuple() forces the generator purely for its activation side effects.
    tuple(
        dist.activate(replace=False)
        for dist in working_set
    )
    add_activation_listener(
        lambda dist: dist.activate(replace=True),
        existing=False,
    )
    # Rebuild the entry list from sys.path so entry order matches it.
    working_set.entries = []
    # match order
    list(map(working_set.add_entry, sys.path))
    globals().update(locals())
3305
3306
class PkgResourcesDeprecationWarning(Warning):
    """
    Base class for warning about deprecations in ``pkg_resources``

    This class is not derived from ``DeprecationWarning``, and as such is
    visible by default.
    """