1"""Package resource API
2--------------------
3
4A resource is a logical file contained within a package, or a logical
5subdirectory thereof.  The package resource API expects resource names
6to have their path parts separated with ``/``, *not* whatever the local
7path separator is.  Do not use os.path operations to manipulate resource
8names being passed into the API.
9
10The package resource API is designed to work with normal filesystem packages,
11.egg files, and unpacked .egg files.  It can also work in a limited way with
12.zip files and with custom PEP 302 loaders that support the ``get_data()``
13method.
14"""
15
16import sys
17import os
18import time
19import re
20import imp
21import zipfile
22import zipimport
23import warnings
24import stat
25import functools
26import pkgutil
27import token
28import symbol
29import operator
30import platform
31from pkgutil import get_importer
32
33try:
34    from urlparse import urlparse, urlunparse
35except ImportError:
36    from urllib.parse import urlparse, urlunparse
37
38try:
39    frozenset
40except NameError:
41    from sets import ImmutableSet as frozenset
42try:
43    basestring
44    next = lambda o: o.next()
45    from cStringIO import StringIO as BytesIO
46except NameError:
47    basestring = str
48    from io import BytesIO
49    def execfile(fn, globs=None, locs=None):
50        if globs is None:
51            globs = globals()
52        if locs is None:
53            locs = globs
        with open(fn) as f:
            exec(compile(f.read(), fn, 'exec'), globs, locs)
55
56# capture these to bypass sandboxing
57from os import utime
58try:
59    from os import mkdir, rename, unlink
60    WRITE_SUPPORT = True
61except ImportError:
62    # no write support, probably under GAE
63    WRITE_SUPPORT = False
64
65from os import open as os_open
66from os.path import isdir, split
67
68# Avoid try/except due to potential problems with delayed import mechanisms.
if sys.version_info >= (3, 3) and sys.implementation.name == "cpython":
70    import importlib._bootstrap as importlib_bootstrap
71else:
72    importlib_bootstrap = None
73
74try:
75    import parser
76except ImportError:
77    pass
78
79def _bypass_ensure_directory(name, mode=0x1FF):  # 0777
80    # Sandbox-bypassing version of ensure_directory()
81    if not WRITE_SUPPORT:
82        raise IOError('"os.mkdir" not supported on this platform.')
83    dirname, filename = split(name)
84    if dirname and filename and not isdir(dirname):
85        _bypass_ensure_directory(dirname)
86        mkdir(dirname, mode)
87
88
89_state_vars = {}
90
91def _declare_state(vartype, **kw):
92    g = globals()
93    for name, val in kw.items():
94        g[name] = val
95        _state_vars[name] = vartype
96
97def __getstate__():
98    state = {}
99    g = globals()
100    for k, v in _state_vars.items():
101        state[k] = g['_sget_'+v](g[k])
102    return state
103
104def __setstate__(state):
105    g = globals()
106    for k, v in state.items():
107        g['_sset_'+_state_vars[k]](k, g[k], v)
108    return state
109
110def _sget_dict(val):
111    return val.copy()
112
113def _sset_dict(key, ob, state):
114    ob.clear()
115    ob.update(state)
116
117def _sget_object(val):
118    return val.__getstate__()
119
120def _sset_object(key, ob, state):
121    ob.__setstate__(state)
122
123_sget_none = _sset_none = lambda *args: None
124
125
126def get_supported_platform():
127    """Return this platform's maximum compatible version.
128
129    distutils.util.get_platform() normally reports the minimum version
130    of Mac OS X that would be required to *use* extensions produced by
131    distutils.  But what we want when checking compatibility is to know the
132    version of Mac OS X that we are *running*.  To allow usage of packages that
133    explicitly require a newer version of Mac OS X, we must also know the
134    current version of the OS.
135
136    If this condition occurs for any other platform with a version in its
137    platform strings, this function should be extended accordingly.
138    """
139    plat = get_build_platform()
140    m = macosVersionString.match(plat)
141    if m is not None and sys.platform == "darwin":
142        try:
143            plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3))
144        except ValueError:
145            pass    # not Mac OS X
146    return plat
147
148__all__ = [
149    # Basic resource access and distribution/entry point discovery
150    'require', 'run_script', 'get_provider',  'get_distribution',
151    'load_entry_point', 'get_entry_map', 'get_entry_info', 'iter_entry_points',
152    'resource_string', 'resource_stream', 'resource_filename',
153    'resource_listdir', 'resource_exists', 'resource_isdir',
154
155    # Environmental control
156    'declare_namespace', 'working_set', 'add_activation_listener',
157    'find_distributions', 'set_extraction_path', 'cleanup_resources',
158    'get_default_cache',
159
160    # Primary implementation classes
161    'Environment', 'WorkingSet', 'ResourceManager',
162    'Distribution', 'Requirement', 'EntryPoint',
163
164    # Exceptions
165    'ResolutionError','VersionConflict','DistributionNotFound','UnknownExtra',
166    'ExtractionError',
167
168    # Parsing functions and string utilities
169    'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
170    'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
171    'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker',
172
173    # filesystem utilities
174    'ensure_directory', 'normalize_path',
175
176    # Distribution "precedence" constants
177    'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',
178
179    # "Provider" interfaces, implementations, and registration/lookup APIs
180    'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
181    'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
182    'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
183    'register_finder', 'register_namespace_handler', 'register_loader_type',
184    'fixup_namespace_packages', 'get_importer',
185
186    # Deprecated/backward compatibility only
187    'run_main', 'AvailableDistributions',
188]
189
190class ResolutionError(Exception):
191    """Abstract base for dependency resolution errors"""
192    def __repr__(self):
193        return self.__class__.__name__+repr(self.args)
194
195class VersionConflict(ResolutionError):
196    """An already-installed version conflicts with the requested version"""
197
198class DistributionNotFound(ResolutionError):
199    """A requested distribution was not found"""
200
201class UnknownExtra(ResolutionError):
    """Distribution doesn't have an "extra feature" of the given name"""

_provider_factories = {}
204
205PY_MAJOR = sys.version[:3]
206EGG_DIST = 3
207BINARY_DIST = 2
208SOURCE_DIST = 1
209CHECKOUT_DIST = 0
210DEVELOP_DIST = -1
211
212def register_loader_type(loader_type, provider_factory):
213    """Register `provider_factory` to make providers for `loader_type`
214
215    `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
216    and `provider_factory` is a function that, passed a *module* object,
217    returns an ``IResourceProvider`` for that module.
218    """
219    _provider_factories[loader_type] = provider_factory
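
# For example, a hypothetical loader type could be wired up to an existing
# provider class like this (``MyLoader`` is an illustrative name):
#
#   register_loader_type(MyLoader, DefaultProvider)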
220
221def get_provider(moduleOrReq):
222    """Return an IResourceProvider for the named module or requirement"""
223    if isinstance(moduleOrReq,Requirement):
224        return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
225    try:
226        module = sys.modules[moduleOrReq]
227    except KeyError:
228        __import__(moduleOrReq)
229        module = sys.modules[moduleOrReq]
230    loader = getattr(module, '__loader__', None)
231    return _find_adapter(_provider_factories, loader)(module)
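
# For example (``mypkg`` is an illustrative importable package name):
#
#   provider = get_provider('mypkg')
#   provider.has_resource('logo.png')          # -> True or False
#   provider.resource_listdir('templates')     # -> list of resource names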
232
233def _macosx_vers(_cache=[]):
234    if not _cache:
235        import platform
236        version = platform.mac_ver()[0]
237        # fallback for MacPorts
238        if version == '':
239            import plistlib
240            plist = '/System/Library/CoreServices/SystemVersion.plist'
241            if os.path.exists(plist):
242                if hasattr(plistlib, 'readPlist'):
243                    plist_content = plistlib.readPlist(plist)
244                    if 'ProductVersion' in plist_content:
245                        version = plist_content['ProductVersion']
246
247        _cache.append(version.split('.'))
248    return _cache[0]
249
250def _macosx_arch(machine):
251    return {'PowerPC':'ppc', 'Power_Macintosh':'ppc'}.get(machine,machine)
252
253def get_build_platform():
254    """Return this platform's string for platform-specific distributions
255
256    XXX Currently this is the same as ``distutils.util.get_platform()``, but it
257    needs some hacks for Linux and Mac OS X.
258    """
259    from sysconfig import get_platform
260    plat = get_platform()
261    if sys.platform == "darwin" and not plat.startswith('macosx-'):
262        try:
263            version = _macosx_vers()
264            machine = os.uname()[4].replace(" ", "_")
265            return "macosx-%d.%d-%s" % (int(version[0]), int(version[1]),
266                _macosx_arch(machine))
267        except ValueError:
268            # if someone is running a non-Mac darwin system, this will fall
269            # through to the default implementation
270            pass
271    return plat
272
273macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
274darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
275get_platform = get_build_platform   # XXX backward compat
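
# Typical values (machine-dependent; shown for illustration only):
#
#   get_build_platform()        # e.g. 'linux-x86_64' or 'win32'
#   get_supported_platform()    # e.g. 'macosx-10.9-x86_64' on a 10.9 system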
276
277
278def compatible_platforms(provided,required):
279    """Can code for the `provided` platform run on the `required` platform?
280
281    Returns true if either platform is ``None``, or the platforms are equal.
282
283    XXX Needs compatibility checks for Linux and other unixy OSes.
284    """
285    if provided is None or required is None or provided==required:
286        return True     # easy case
287
288    # Mac OS X special cases
289    reqMac = macosVersionString.match(required)
290    if reqMac:
291        provMac = macosVersionString.match(provided)
292
293        # is this a Mac package?
294        if not provMac:
295            # this is backwards compatibility for packages built before
296            # setuptools 0.6. All packages built after this point will
297            # use the new macosx designation.
298            provDarwin = darwinVersionString.match(provided)
299            if provDarwin:
300                dversion = int(provDarwin.group(1))
301                macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
                if (dversion == 7 and macosversion >= "10.3") or \
                        (dversion == 8 and macosversion >= "10.4"):
304
305                    #import warnings
306                    #warnings.warn("Mac eggs should be rebuilt to "
307                    #    "use the macosx designation instead of darwin.",
308                    #    category=DeprecationWarning)
309                    return True
310            return False    # egg isn't macosx or legacy darwin
311
312        # are they the same major version and machine type?
313        if provMac.group(1) != reqMac.group(1) or \
314                provMac.group(3) != reqMac.group(3):
315            return False
316
317        # is the required OS major update >= the provided one?
318        if int(provMac.group(2)) > int(reqMac.group(2)):
319            return False
320
321        return True
322
323    # XXX Linux and other platforms' special cases should go here
324    return False
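
# Illustrative checks (platform strings chosen for the example only):
#
#   compatible_platforms('macosx-10.3-ppc', 'macosx-10.9-ppc')  # True
#   compatible_platforms('macosx-10.9-ppc', 'macosx-10.3-ppc')  # False (needs newer OS)
#   compatible_platforms(None, 'win32')                         # True (platform-independent)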
325
326
327def run_script(dist_spec, script_name):
328    """Locate distribution `dist_spec` and run its `script_name` script"""
329    ns = sys._getframe(1).f_globals
330    name = ns['__name__']
331    ns.clear()
332    ns['__name__'] = name
333    require(dist_spec)[0].run_script(script_name, ns)
334
335run_main = run_script   # backward compatibility
336
337def get_distribution(dist):
338    """Return a current distribution object for a Requirement or string"""
339    if isinstance(dist,basestring): dist = Requirement.parse(dist)
340    if isinstance(dist,Requirement): dist = get_provider(dist)
341    if not isinstance(dist,Distribution):
342        raise TypeError("Expected string, Requirement, or Distribution", dist)
343    return dist
344
345def load_entry_point(dist, group, name):
346    """Return `name` entry point of `group` for `dist` or raise ImportError"""
347    return get_distribution(dist).load_entry_point(group, name)
348
349def get_entry_map(dist, group=None):
350    """Return the entry point map for `group`, or the full entry map"""
351    return get_distribution(dist).get_entry_map(group)
352
353def get_entry_info(dist, group, name):
354    """Return the EntryPoint object for `group`+`name`, or ``None``"""
355    return get_distribution(dist).get_entry_info(group, name)
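
# Hypothetical example (assumes a distribution named 'SomeTool' that declares
# a 'console_scripts' entry point called 'sometool'):
#
#   main = load_entry_point('SomeTool', 'console_scripts', 'sometool')
#   ep = get_entry_info('SomeTool', 'console_scripts', 'sometool')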
356
357
358class IMetadataProvider:
359
360    def has_metadata(name):
361        """Does the package's distribution contain the named metadata?"""
362
363    def get_metadata(name):
364        """The named metadata resource as a string"""
365
366    def get_metadata_lines(name):
367        """Yield named metadata resource as list of non-blank non-comment lines
368
369       Leading and trailing whitespace is stripped from each line, and lines
370       with ``#`` as the first non-blank character are omitted."""
371
372    def metadata_isdir(name):
373        """Is the named metadata a directory?  (like ``os.path.isdir()``)"""
374
375    def metadata_listdir(name):
376        """List of metadata names in the directory (like ``os.listdir()``)"""
377
378    def run_script(script_name, namespace):
379        """Execute the named script in the supplied namespace dictionary"""
380
381
382class IResourceProvider(IMetadataProvider):
383    """An object that provides access to package resources"""
384
385    def get_resource_filename(manager, resource_name):
386        """Return a true filesystem path for `resource_name`
387
388        `manager` must be an ``IResourceManager``"""
389
390    def get_resource_stream(manager, resource_name):
391        """Return a readable file-like object for `resource_name`
392
393        `manager` must be an ``IResourceManager``"""
394
395    def get_resource_string(manager, resource_name):
396        """Return a string containing the contents of `resource_name`
397
398        `manager` must be an ``IResourceManager``"""
399
400    def has_resource(resource_name):
401        """Does the package contain the named resource?"""
402
403    def resource_isdir(resource_name):
404        """Is the named resource a directory?  (like ``os.path.isdir()``)"""
405
406    def resource_listdir(resource_name):
407        """List of resource names in the directory (like ``os.listdir()``)"""
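
# A provider for a custom loader type implements the methods above and is
# registered with ``register_loader_type``; ``NullProvider`` below is a
# convenient base class for that.  Sketch only; ``MyLoader`` is hypothetical:
#
#   class MyProvider(NullProvider):
#       def _has(self, path):
#           return self.loader.exists(path)
#
#   register_loader_type(MyLoader, MyProvider)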
408
409
410class WorkingSet(object):
411    """A collection of active distributions on sys.path (or a similar list)"""
412
413    def __init__(self, entries=None):
414        """Create working set from list of path entries (default=sys.path)"""
415        self.entries = []
416        self.entry_keys = {}
417        self.by_key = {}
418        self.callbacks = []
419
420        if entries is None:
421            entries = sys.path
422
423        for entry in entries:
424            self.add_entry(entry)
425
426    def add_entry(self, entry):
427        """Add a path item to ``.entries``, finding any distributions on it
428
429        ``find_distributions(entry, True)`` is used to find distributions
430        corresponding to the path entry, and they are added.  `entry` is
431        always appended to ``.entries``, even if it is already present.
432        (This is because ``sys.path`` can contain the same value more than
433        once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
434        equal ``sys.path``.)
435        """
436        self.entry_keys.setdefault(entry, [])
437        self.entries.append(entry)
438        for dist in find_distributions(entry, True):
439            self.add(dist, entry, False)
440
441    def __contains__(self,dist):
442        """True if `dist` is the active distribution for its project"""
443        return self.by_key.get(dist.key) == dist
444
445    def find(self, req):
446        """Find a distribution matching requirement `req`
447
448        If there is an active distribution for the requested project, this
449        returns it as long as it meets the version requirement specified by
450        `req`.  But, if there is an active distribution for the project and it
451        does *not* meet the `req` requirement, ``VersionConflict`` is raised.
452        If there is no active distribution for the requested project, ``None``
453        is returned.
454        """
455        dist = self.by_key.get(req.key)
456        if dist is not None and dist not in req:
457            raise VersionConflict(dist,req)     # XXX add more info
458        else:
459            return dist
460
461    def iter_entry_points(self, group, name=None):
462        """Yield entry point objects from `group` matching `name`
463
464        If `name` is None, yields all entry points in `group` from all
465        distributions in the working set, otherwise only ones matching
466        both `group` and `name` are yielded (in distribution order).
467        """
468        for dist in self:
469            entries = dist.get_entry_map(group)
470            if name is None:
471                for ep in entries.values():
472                    yield ep
473            elif name in entries:
474                yield entries[name]
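
    # For example, iterating all console scripts known to the default working
    # set (a sketch; the group name is standard, but the results depend on
    # what is installed):
    #
    #   for ep in working_set.iter_entry_points('console_scripts'):
    #       print(ep.name)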
475
476    def run_script(self, requires, script_name):
477        """Locate distribution for `requires` and run `script_name` script"""
478        ns = sys._getframe(1).f_globals
479        name = ns['__name__']
480        ns.clear()
481        ns['__name__'] = name
482        self.require(requires)[0].run_script(script_name, ns)
483
484    def __iter__(self):
485        """Yield distributions for non-duplicate projects in the working set
486
487        The yield order is the order in which the items' path entries were
488        added to the working set.
489        """
490        seen = {}
491        for item in self.entries:
492            if item not in self.entry_keys:
493                # workaround a cache issue
494                continue
495
496            for key in self.entry_keys[item]:
497                if key not in seen:
498                    seen[key]=1
499                    yield self.by_key[key]
500
501    def add(self, dist, entry=None, insert=True):
502        """Add `dist` to working set, associated with `entry`
503
504        If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
505        On exit from this routine, `entry` is added to the end of the working
506        set's ``.entries`` (if it wasn't already present).
507
508        `dist` is only added to the working set if it's for a project that
509        doesn't already have a distribution in the set.  If it's added, any
510        callbacks registered with the ``subscribe()`` method will be called.
511        """
512        if insert:
513            dist.insert_on(self.entries, entry)
514
515        if entry is None:
516            entry = dist.location
517        keys = self.entry_keys.setdefault(entry,[])
518        keys2 = self.entry_keys.setdefault(dist.location,[])
519        if dist.key in self.by_key:
520            return      # ignore hidden distros
521
522        self.by_key[dist.key] = dist
523        if dist.key not in keys:
524            keys.append(dist.key)
525        if dist.key not in keys2:
526            keys2.append(dist.key)
527        self._added_new(dist)
528
529    def resolve(self, requirements, env=None, installer=None):
530        """List all distributions needed to (recursively) meet `requirements`
531
532        `requirements` must be a sequence of ``Requirement`` objects.  `env`,
533        if supplied, should be an ``Environment`` instance.  If
534        not supplied, it defaults to all distributions available within any
535        entry or distribution in the working set.  `installer`, if supplied,
536        will be invoked with each requirement that cannot be met by an
537        already-installed distribution; it should return a ``Distribution`` or
538        ``None``.
539        """
540
541        requirements = list(requirements)[::-1]  # set up the stack
542        processed = {}  # set of processed requirements
543        best = {}  # key -> dist
544        to_activate = []
545
546        while requirements:
547            req = requirements.pop(0)   # process dependencies breadth-first
548            if req in processed:
549                # Ignore cyclic or redundant dependencies
550                continue
551            dist = best.get(req.key)
552            if dist is None:
553                # Find the best distribution and add it to the map
554                dist = self.by_key.get(req.key)
555                if dist is None:
556                    if env is None:
557                        env = Environment(self.entries)
558                    dist = best[req.key] = env.best_match(req, self, installer)
559                    if dist is None:
560                        #msg = ("The '%s' distribution was not found on this "
561                        #       "system, and is required by this application.")
562                        #raise DistributionNotFound(msg % req)
563
564                        # unfortunately, zc.buildout uses a str(err)
565                        # to get the name of the distribution here..
566                        raise DistributionNotFound(req)
567                to_activate.append(dist)
568            if dist not in req:
569                # Oops, the "best" so far conflicts with a dependency
570                raise VersionConflict(dist,req) # XXX put more info here
571            requirements.extend(dist.requires(req.extras)[::-1])
572            processed[req] = True
573
574        return to_activate    # return list of distros to activate
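
    # For example (a sketch; the requirement string is illustrative):
    #
    #   reqs = parse_requirements('SomeProject>=1.2')
    #   for dist in working_set.resolve(reqs):
    #       working_set.add(dist)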
575
576    def find_plugins(self, plugin_env, full_env=None, installer=None,
577            fallback=True):
578        """Find all activatable distributions in `plugin_env`
579
580        Example usage::
581
582            distributions, errors = working_set.find_plugins(
583                Environment(plugin_dirlist)
584            )
585            map(working_set.add, distributions)  # add plugins+libs to sys.path
586            print 'Could not load', errors        # display errors
587
588        The `plugin_env` should be an ``Environment`` instance that contains
589        only distributions that are in the project's "plugin directory" or
        directories. The `full_env`, if supplied, should be an ``Environment``
        containing all currently-available distributions.  If `full_env` is not
592        supplied, one is created automatically from the ``WorkingSet`` this
593        method is called on, which will typically mean that every directory on
594        ``sys.path`` will be scanned for distributions.
595
596        `installer` is a standard installer callback as used by the
597        ``resolve()`` method. The `fallback` flag indicates whether we should
598        attempt to resolve older versions of a plugin if the newest version
599        cannot be resolved.
600
601        This method returns a 2-tuple: (`distributions`, `error_info`), where
602        `distributions` is a list of the distributions found in `plugin_env`
603        that were loadable, along with any other distributions that are needed
604        to resolve their dependencies.  `error_info` is a dictionary mapping
605        unloadable plugin distributions to an exception instance describing the
606        error that occurred. Usually this will be a ``DistributionNotFound`` or
607        ``VersionConflict`` instance.
608        """
609
610        plugin_projects = list(plugin_env)
611        plugin_projects.sort()  # scan project names in alphabetic order
612
613        error_info = {}
614        distributions = {}
615
616        if full_env is None:
617            env = Environment(self.entries)
618            env += plugin_env
619        else:
620            env = full_env + plugin_env
621
622        shadow_set = self.__class__([])
623        list(map(shadow_set.add, self))   # put all our entries in shadow_set
624
625        for project_name in plugin_projects:
626
627            for dist in plugin_env[project_name]:
628
629                req = [dist.as_requirement()]
630
631                try:
632                    resolvees = shadow_set.resolve(req, env, installer)
633
634                except ResolutionError:
635                    v = sys.exc_info()[1]
636                    error_info[dist] = v    # save error info
637                    if fallback:
638                        continue    # try the next older version of project
639                    else:
640                        break       # give up on this project, keep going
641
642                else:
643                    list(map(shadow_set.add, resolvees))
644                    distributions.update(dict.fromkeys(resolvees))
645
646                    # success, no need to try any more versions of this project
647                    break
648
649        distributions = list(distributions)
650        distributions.sort()
651
652        return distributions, error_info
653
654    def require(self, *requirements):
655        """Ensure that distributions matching `requirements` are activated
656
657        `requirements` must be a string or a (possibly-nested) sequence
658        thereof, specifying the distributions and versions required.  The
659        return value is a sequence of the distributions that needed to be
660        activated to fulfill the requirements; all relevant distributions are
661        included, even if they were already activated in this working set.
662        """
663        needed = self.resolve(parse_requirements(requirements))
664
665        for dist in needed:
666            self.add(dist)
667
668        return needed
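
    # Typically invoked through the module-level ``require()`` helper, e.g.
    # (illustrative project name):
    #
    #   require('SomeProject>=1.2')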
669
670    def subscribe(self, callback):
671        """Invoke `callback` for all distributions (including existing ones)"""
672        if callback in self.callbacks:
673            return
674        self.callbacks.append(callback)
675        for dist in self:
676            callback(dist)
677
678    def _added_new(self, dist):
679        for callback in self.callbacks:
680            callback(dist)
681
682    def __getstate__(self):
683        return (
684            self.entries[:], self.entry_keys.copy(), self.by_key.copy(),
685            self.callbacks[:]
686        )
687
688    def __setstate__(self, e_k_b_c):
689        entries, keys, by_key, callbacks = e_k_b_c
690        self.entries = entries[:]
691        self.entry_keys = keys.copy()
692        self.by_key = by_key.copy()
693        self.callbacks = callbacks[:]
694
695
696class Environment(object):
697    """Searchable snapshot of distributions on a search path"""
698
699    def __init__(self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR):
700        """Snapshot distributions available on a search path
701
702        Any distributions found on `search_path` are added to the environment.
703        `search_path` should be a sequence of ``sys.path`` items.  If not
704        supplied, ``sys.path`` is used.
705
706        `platform` is an optional string specifying the name of the platform
707        that platform-specific distributions must be compatible with.  If
708        unspecified, it defaults to the current platform.  `python` is an
709        optional string naming the desired version of Python (e.g. ``'3.3'``);
710        it defaults to the current version.
711
712        You may explicitly set `platform` (and/or `python`) to ``None`` if you
713        wish to map *all* distributions, not just those compatible with the
714        running platform or Python version.
715        """
716        self._distmap = {}
717        self._cache = {}
718        self.platform = platform
719        self.python = python
720        self.scan(search_path)
721
722    def can_add(self, dist):
723        """Is distribution `dist` acceptable for this environment?
724
725        The distribution must match the platform and python version
726        requirements specified when this environment was created, or False
727        is returned.
728        """
729        return (self.python is None or dist.py_version is None
730            or dist.py_version==self.python) \
731            and compatible_platforms(dist.platform,self.platform)
732
733    def remove(self, dist):
734        """Remove `dist` from the environment"""
735        self._distmap[dist.key].remove(dist)
736
737    def scan(self, search_path=None):
738        """Scan `search_path` for distributions usable in this environment
739
740        Any distributions found are added to the environment.
741        `search_path` should be a sequence of ``sys.path`` items.  If not
742        supplied, ``sys.path`` is used.  Only distributions conforming to
743        the platform/python version defined at initialization are added.
744        """
745        if search_path is None:
746            search_path = sys.path
747
748        for item in search_path:
749            for dist in find_distributions(item):
750                self.add(dist)
751
752    def __getitem__(self,project_name):
753        """Return a newest-to-oldest list of distributions for `project_name`
754        """
755        try:
756            return self._cache[project_name]
757        except KeyError:
758            project_name = project_name.lower()
759            if project_name not in self._distmap:
760                return []
761
762        if project_name not in self._cache:
763            dists = self._cache[project_name] = self._distmap[project_name]
764            _sort_dists(dists)
765
766        return self._cache[project_name]
767
768    def add(self,dist):
769        """Add `dist` if we ``can_add()`` it and it isn't already added"""
770        if self.can_add(dist) and dist.has_version():
771            dists = self._distmap.setdefault(dist.key,[])
772            if dist not in dists:
773                dists.append(dist)
774                if dist.key in self._cache:
775                    _sort_dists(self._cache[dist.key])
776
777    def best_match(self, req, working_set, installer=None):
778        """Find distribution best matching `req` and usable on `working_set`
779
780        This calls the ``find(req)`` method of the `working_set` to see if a
781        suitable distribution is already active.  (This may raise
782        ``VersionConflict`` if an unsuitable version of the project is already
783        active in the specified `working_set`.)  If a suitable distribution
784        isn't active, this method returns the newest distribution in the
785        environment that meets the ``Requirement`` in `req`.  If no suitable
786        distribution is found, and `installer` is supplied, then the result of
787        calling the environment's ``obtain(req, installer)`` method will be
788        returned.
789        """
790        dist = working_set.find(req)
791        if dist is not None:
792            return dist
793        for dist in self[req.key]:
794            if dist in req:
795                return dist
796        return self.obtain(req, installer) # try and download/install
797
798    def obtain(self, requirement, installer=None):
799        """Obtain a distribution matching `requirement` (e.g. via download)
800
801        Obtain a distro that matches requirement (e.g. via download).  In the
802        base ``Environment`` class, this routine just returns
803        ``installer(requirement)``, unless `installer` is None, in which case
804        None is returned instead.  This method is a hook that allows subclasses
805        to attempt other ways of obtaining a distribution before falling back
806        to the `installer` argument."""
807        if installer is not None:
808            return installer(requirement)
809
810    def __iter__(self):
811        """Yield the unique project names of the available distributions"""
812        for key in self._distmap.keys():
813            if self[key]: yield key
814
815    def __iadd__(self, other):
816        """In-place addition of a distribution or environment"""
817        if isinstance(other,Distribution):
818            self.add(other)
819        elif isinstance(other,Environment):
820            for project in other:
821                for dist in other[project]:
822                    self.add(dist)
823        else:
824            raise TypeError("Can't add %r to environment" % (other,))
825        return self
826
827    def __add__(self, other):
828        """Add an environment or distribution to an environment"""
829        new = self.__class__([], platform=None, python=None)
830        for env in self, other:
831            new += env
832        return new
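
    # Usage sketch (directory names are illustrative):
    #
    #   env = Environment(['plugins', 'eggs'])
    #   for project_name in env:
    #       newest = env[project_name][0]    # newest-to-oldest ordering
    #   dist = env.best_match(Requirement.parse('SomeProject>=1.0'), working_set)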
833
834
835AvailableDistributions = Environment    # XXX backward compatibility
836
837
838class ExtractionError(RuntimeError):
839    """An error occurred extracting a resource
840
841    The following attributes are available from instances of this exception:
842
843    manager
844        The resource manager that raised this exception
845
846    cache_path
847        The base directory for resource extraction
848
849    original_error
850        The exception instance that caused extraction to fail
851    """
852
853
854class ResourceManager:
855    """Manage resource extraction and packages"""
856    extraction_path = None
857
858    def __init__(self):
859        self.cached_files = {}
860
861    def resource_exists(self, package_or_requirement, resource_name):
862        """Does the named resource exist?"""
863        return get_provider(package_or_requirement).has_resource(resource_name)
864
865    def resource_isdir(self, package_or_requirement, resource_name):
866        """Is the named resource an existing directory?"""
867        return get_provider(package_or_requirement).resource_isdir(
868            resource_name
869        )
870
871    def resource_filename(self, package_or_requirement, resource_name):
872        """Return a true filesystem path for specified resource"""
873        return get_provider(package_or_requirement).get_resource_filename(
874            self, resource_name
875        )
876
877    def resource_stream(self, package_or_requirement, resource_name):
878        """Return a readable file-like object for specified resource"""
879        return get_provider(package_or_requirement).get_resource_stream(
880            self, resource_name
881        )
882
883    def resource_string(self, package_or_requirement, resource_name):
884        """Return specified resource as a string"""
885        return get_provider(package_or_requirement).get_resource_string(
886            self, resource_name
887        )
888
889    def resource_listdir(self, package_or_requirement, resource_name):
890        """List the contents of the named resource directory"""
891        return get_provider(package_or_requirement).resource_listdir(
892            resource_name
893        )
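
    # These methods back the module-level convenience functions of the same
    # names (``resource_string``, ``resource_stream``, etc.).  A sketch of
    # direct use (``mypkg`` is an illustrative package name):
    #
    #   mgr = ResourceManager()
    #   names = mgr.resource_listdir('mypkg', 'templates')
    #   stream = mgr.resource_stream('mypkg', 'templates/base.html')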
894
895    def extraction_error(self):
896        """Give an error message for problems extracting file(s)"""
897
898        old_exc = sys.exc_info()[1]
899        cache_path = self.extraction_path or get_default_cache()
900
901        err = ExtractionError("""Can't extract file(s) to egg cache
902
903The following error occurred while trying to extract file(s) to the Python egg
904cache:
905
906  %s
907
908The Python egg cache directory is currently set to:
909
910  %s
911
912Perhaps your account does not have write access to this directory?  You can
913change the cache directory by setting the PYTHON_EGG_CACHE environment
914variable to point to an accessible directory.
915""" % (old_exc, cache_path)
916        )
917        err.manager = self
918        err.cache_path = cache_path
919        err.original_error = old_exc
920        raise err
921
922    def get_cache_path(self, archive_name, names=()):
923        """Return absolute location in cache for `archive_name` and `names`
924
925        The parent directory of the resulting path will be created if it does
926        not already exist.  `archive_name` should be the base filename of the
927        enclosing egg (which may not be the name of the enclosing zipfile!),
928        including its ".egg" extension.  `names`, if provided, should be a
929        sequence of path name parts "under" the egg's extraction location.
930
931        This method should only be called by resource providers that need to
932        obtain an extraction location, and only for names they intend to
933        extract, as it tracks the generated names for possible cleanup later.
934        """
935        extract_path = self.extraction_path or get_default_cache()
936        target_path = os.path.join(extract_path, archive_name+'-tmp', *names)
937        try:
938            _bypass_ensure_directory(target_path)
939        except:
940            self.extraction_error()
941
942        self._warn_unsafe_extraction_path(extract_path)
943
944        self.cached_files[target_path] = 1
945        return target_path
946
947    @staticmethod
948    def _warn_unsafe_extraction_path(path):
949        """
950        If the default extraction path is overridden and set to an insecure
951        location, such as /tmp, it opens up an opportunity for an attacker to
952        replace an extracted file with an unauthorized payload. Warn the user
953        if a known insecure location is used.
954
955        See Distribute #375 for more details.
956        """
957        if os.name == 'nt' and not path.startswith(os.environ['windir']):
958            # On Windows, permissions are generally restrictive by default
959            #  and temp directories are not writable by other users, so
960            #  bypass the warning.
961            return
962        mode = os.stat(path).st_mode
963        if mode & stat.S_IWOTH or mode & stat.S_IWGRP:
964            msg = ("%s is writable by group/others and vulnerable to attack "
965                "when "
966                "used with get_resource_filename. Consider a more secure "
967                "location (set with .set_extraction_path or the "
968                "PYTHON_EGG_CACHE environment variable)." % path)
969            warnings.warn(msg, UserWarning)
970
971    def postprocess(self, tempname, filename):
972        """Perform any platform-specific postprocessing of `tempname`
973
974        This is where Mac header rewrites should be done; other platforms don't
975        have anything special they should do.
976
977        Resource providers should call this method ONLY after successfully
978        extracting a compressed resource.  They must NOT call it on resources
979        that are already in the filesystem.
980
981        `tempname` is the current (temporary) name of the file, and `filename`
982        is the name it will be renamed to by the caller after this routine
983        returns.
984        """
985
986        if os.name == 'posix':
987            # Make the resource executable
988            mode = ((os.stat(tempname).st_mode) | 0x16D) & 0xFFF # 0555, 07777
989            os.chmod(tempname, mode)
990
991    def set_extraction_path(self, path):
992        """Set the base path where resources will be extracted to, if needed.
993
994        If you do not call this routine before any extractions take place, the
995        path defaults to the return value of ``get_default_cache()``.  (Which
996        is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
997        platform-specific fallbacks.  See that routine's documentation for more
998        details.)
999
1000        Resources are extracted to subdirectories of this path based upon
1001        information given by the ``IResourceProvider``.  You may set this to a
1002        temporary directory, but then you must call ``cleanup_resources()`` to
1003        delete the extracted files when done.  There is no guarantee that
1004        ``cleanup_resources()`` will be able to remove all extracted files.
1005
1006        (Note: you may not change the extraction path for a given resource
1007        manager once resources have been extracted, unless you first call
1008        ``cleanup_resources()``.)
1009        """
1010        if self.cached_files:
1011            raise ValueError(
1012                "Can't change extraction path, files already extracted"
1013            )
1014
1015        self.extraction_path = path
1016
1017    def cleanup_resources(self, force=False):
1018        """
1019        Delete all extracted resource files and directories, returning a list
1020        of the file and directory names that could not be successfully removed.
1021        This function does not have any concurrency protection, so it should
1022        generally only be called when the extraction path is a temporary
1023        directory exclusive to a single process.  This method is not
1024        automatically called; you must call it explicitly or register it as an
1025        ``atexit`` function if you wish to ensure cleanup of a temporary
1026        directory used for extractions.
1027        """
1028        # XXX
1029
1030def get_default_cache():
1031    """Determine the default cache location
1032
1033    This returns the ``PYTHON_EGG_CACHE`` environment variable, if set.
1034    Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of the
1035    "Application Data" directory.  On all other systems, it's "~/.python-eggs".
1036    """
1037    try:
1038        return os.environ['PYTHON_EGG_CACHE']
1039    except KeyError:
1040        pass
1041
1042    if os.name!='nt':
1043        return os.path.expanduser('~/.python-eggs')
1044
1045    app_data = 'Application Data'   # XXX this may be locale-specific!
1046    app_homes = [
1047        (('APPDATA',), None),       # best option, should be locale-safe
1048        (('USERPROFILE',), app_data),
1049        (('HOMEDRIVE','HOMEPATH'), app_data),
1050        (('HOMEPATH',), app_data),
1051        (('HOME',), None),
1052        (('WINDIR',), app_data),    # 95/98/ME
1053    ]
1054
1055    for keys, subdir in app_homes:
1056        dirname = ''
1057        for key in keys:
1058            if key in os.environ:
1059                dirname = os.path.join(dirname, os.environ[key])
1060            else:
1061                break
1062        else:
1063            if subdir:
1064                dirname = os.path.join(dirname,subdir)
1065            return os.path.join(dirname, 'Python-Eggs')
1066    else:
1067        raise RuntimeError(
            "Please set the PYTHON_EGG_CACHE environment variable"
1069        )
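
# The cache location can be overridden with the PYTHON_EGG_CACHE environment
# variable before any extraction takes place (the path is illustrative):
#
#   os.environ['PYTHON_EGG_CACHE'] = '/var/cache/my-app/eggs'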
1070
1071def safe_name(name):
1072    """Convert an arbitrary string to a standard distribution name
1073
1074    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
1075    """
1076    return re.sub('[^A-Za-z0-9.]+', '-', name)
1077
1078
1079def safe_version(version):
1080    """Convert an arbitrary string to a standard version string
1081
1082    Spaces become dots, and all other non-alphanumeric characters become
1083    dashes, with runs of multiple dashes condensed to a single dash.
1084    """
1085    version = version.replace(' ','.')
1086    return re.sub('[^A-Za-z0-9.]+', '-', version)
1087
1088
1089def safe_extra(extra):
1090    """Convert an arbitrary string to a standard 'extra' name
1091
    Any runs of non-alphanumeric characters (other than dots) are replaced
    with a single '_', and the result is always lowercased.
1094    """
1095    return re.sub('[^A-Za-z0-9.]+', '_', extra).lower()
1096
1097
1098def to_filename(name):
1099    """Convert a project or version name to its filename-escaped form
1100
1101    Any '-' characters are currently replaced with '_'.
1102    """
1103    return name.replace('-','_')
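
# Illustrative transformations:
#
#   safe_name('My $$$ Package')     -> 'My-Package'
#   safe_version('2.1 alpha/dev')   -> '2.1.alpha-dev'
#   safe_extra('Foo Bar')           -> 'foo_bar'
#   to_filename('my-package')       -> 'my_package'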
1104
1105
1106class MarkerEvaluation(object):
1107    values = {
1108        'os_name': lambda: os.name,
1109        'sys_platform': lambda: sys.platform,
1110        'python_full_version': lambda: sys.version.split()[0],
1111        'python_version': lambda:'%s.%s' % (sys.version_info[0], sys.version_info[1]),
1112        'platform_version': platform.version,
1113        'platform_machine': platform.machine,
1114        'python_implementation': platform.python_implementation,
1115    }
1116
1117    @classmethod
1118    def is_invalid_marker(cls, text):
1119        """
        Validate text as a PEP 426 environment marker; return an exception
        instance if it is invalid, or False otherwise.
1122        """
1123        try:
1124            cls.evaluate_marker(text)
1125        except SyntaxError:
1126            return cls.normalize_exception(sys.exc_info()[1])
1127        return False
1128
1129    @staticmethod
1130    def normalize_exception(exc):
1131        """
1132        Given a SyntaxError from a marker evaluation, normalize the error message:
1133         - Remove indications of filename and line number.
1134         - Replace platform-specific error messages with standard error messages.
1135        """
1136        subs = {
1137            'unexpected EOF while parsing': 'invalid syntax',
1138            'parenthesis is never closed': 'invalid syntax',
1139        }
1140        exc.filename = None
1141        exc.lineno = None
1142        exc.msg = subs.get(exc.msg, exc.msg)
1143        return exc
1144
1145    @classmethod
1146    def and_test(cls, nodelist):
1147        # MUST NOT short-circuit evaluation, or invalid syntax can be skipped!
1148        return functools.reduce(operator.and_, [cls.interpret(nodelist[i]) for i in range(1,len(nodelist),2)])
1149
1150    @classmethod
1151    def test(cls, nodelist):
1152        # MUST NOT short-circuit evaluation, or invalid syntax can be skipped!
1153        return functools.reduce(operator.or_, [cls.interpret(nodelist[i]) for i in range(1,len(nodelist),2)])
1154
1155    @classmethod
1156    def atom(cls, nodelist):
1157        t = nodelist[1][0]
1158        if t == token.LPAR:
1159            if nodelist[2][0] == token.RPAR:
1160                raise SyntaxError("Empty parentheses")
1161            return cls.interpret(nodelist[2])
1162        raise SyntaxError("Language feature not supported in environment markers")
1163
1164    @classmethod
1165    def comparison(cls, nodelist):
1166        if len(nodelist)>4:
1167            raise SyntaxError("Chained comparison not allowed in environment markers")
1168        comp = nodelist[2][1]
1169        cop = comp[1]
1170        if comp[0] == token.NAME:
1171            if len(nodelist[2]) == 3:
1172                if cop == 'not':
1173                    cop = 'not in'
1174                else:
1175                    cop = 'is not'
1176        try:
1177            cop = cls.get_op(cop)
1178        except KeyError:
1179            raise SyntaxError(repr(cop)+" operator not allowed in environment markers")
1180        return cop(cls.evaluate(nodelist[1]), cls.evaluate(nodelist[3]))
1181
1182    @classmethod
1183    def get_op(cls, op):
1184        ops = {
1185            symbol.test: cls.test,
1186            symbol.and_test: cls.and_test,
1187            symbol.atom: cls.atom,
1188            symbol.comparison: cls.comparison,
1189            'not in': lambda x, y: x not in y,
1190            'in': lambda x, y: x in y,
1191            '==': operator.eq,
1192            '!=': operator.ne,
1193        }
1194        if hasattr(symbol, 'or_test'):
1195            ops[symbol.or_test] = cls.test
1196        return ops[op]
1197
1198    @classmethod
1199    def evaluate_marker(cls, text, extra=None):
1200        """
1201        Evaluate a PEP 426 environment marker on CPython 2.4+.
1202        Return a boolean indicating the marker result in this environment.
1203        Raise SyntaxError if marker is invalid.
1204
1205        This implementation uses the 'parser' module, which is not implemented on
1206        Jython and has been superseded by the 'ast' module in Python 2.6 and
1207        later.
1208        """
1209        return cls.interpret(parser.expr(text).totuple(1)[1])
1210
1211    @classmethod
1212    def _markerlib_evaluate(cls, text):
1213        """
1214        Evaluate a PEP 426 environment marker using markerlib.
1215        Return a boolean indicating the marker result in this environment.
1216        Raise SyntaxError if marker is invalid.
1217        """
1218        import _markerlib
1219        # markerlib implements Metadata 1.2 (PEP 345) environment markers.
1220        # Translate the variables to Metadata 2.0 (PEP 426).
1221        env = _markerlib.default_environment()
1222        for key in env.keys():
1223            new_key = key.replace('.', '_')
1224            env[new_key] = env.pop(key)
1225        try:
1226            result = _markerlib.interpret(text, env)
1227        except NameError:
1228            e = sys.exc_info()[1]
1229            raise SyntaxError(e.args[0])
1230        return result
1231
1232    if 'parser' not in globals():
1233        # Fall back to less-complete _markerlib implementation if 'parser' module
1234        # is not available.
1235        evaluate_marker = _markerlib_evaluate
1236
1237    @classmethod
1238    def interpret(cls, nodelist):
1239        while len(nodelist)==2: nodelist = nodelist[1]
1240        try:
1241            op = cls.get_op(nodelist[0])
1242        except KeyError:
1243            raise SyntaxError("Comparison or logical expression expected")
1244        return op(nodelist)
1245
1246    @classmethod
1247    def evaluate(cls, nodelist):
1248        while len(nodelist)==2: nodelist = nodelist[1]
1249        kind = nodelist[0]
1250        name = nodelist[1]
1251        if kind==token.NAME:
1252            try:
1253                op = cls.values[name]
1254            except KeyError:
1255                raise SyntaxError("Unknown name %r" % name)
1256            return op()
1257        if kind==token.STRING:
1258            s = nodelist[1]
1259            if s[:1] not in "'\"" or s.startswith('"""') or s.startswith("'''") \
1260                    or '\\' in s:
1261                raise SyntaxError(
1262                    "Only plain strings allowed in environment markers")
1263            return s[1:-1]
1264        raise SyntaxError("Language feature not supported in environment markers")
1265
1266invalid_marker = MarkerEvaluation.is_invalid_marker
1267evaluate_marker = MarkerEvaluation.evaluate_marker
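
# For example (marker strings follow PEP 426; results depend on the running
# interpreter, and only ==, !=, 'in' and 'not in' comparisons are allowed):
#
#   evaluate_marker("python_version == '2.7' or python_version == '3.3'")
#   evaluate_marker("'linux' in sys_platform")
#   invalid_marker("os_name >= 'posix'")    # -> a SyntaxError instance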
1268
1269class NullProvider:
1270    """Try to implement resources and metadata for arbitrary PEP 302 loaders"""
1271
1272    egg_name = None
1273    egg_info = None
1274    loader = None
1275
1276    def __init__(self, module):
1277        self.loader = getattr(module, '__loader__', None)
1278        self.module_path = os.path.dirname(getattr(module, '__file__', ''))
1279
1280    def get_resource_filename(self, manager, resource_name):
1281        return self._fn(self.module_path, resource_name)
1282
1283    def get_resource_stream(self, manager, resource_name):
1284        return BytesIO(self.get_resource_string(manager, resource_name))
1285
1286    def get_resource_string(self, manager, resource_name):
1287        return self._get(self._fn(self.module_path, resource_name))
1288
1289    def has_resource(self, resource_name):
1290        return self._has(self._fn(self.module_path, resource_name))
1291
1292    def has_metadata(self, name):
1293        return self.egg_info and self._has(self._fn(self.egg_info,name))
1294
    if sys.version_info[0] == 2:
1296        def get_metadata(self, name):
1297            if not self.egg_info:
1298                return ""
1299            return self._get(self._fn(self.egg_info,name))
1300    else:
1301        def get_metadata(self, name):
1302            if not self.egg_info:
1303                return ""
1304            return self._get(self._fn(self.egg_info,name)).decode("utf-8")
1305
1306    def get_metadata_lines(self, name):
1307        return yield_lines(self.get_metadata(name))
1308
1309    def resource_isdir(self,resource_name):
1310        return self._isdir(self._fn(self.module_path, resource_name))
1311
1312    def metadata_isdir(self,name):
1313        return self.egg_info and self._isdir(self._fn(self.egg_info,name))
1314
1315    def resource_listdir(self,resource_name):
1316        return self._listdir(self._fn(self.module_path,resource_name))
1317
1318    def metadata_listdir(self,name):
1319        if self.egg_info:
1320            return self._listdir(self._fn(self.egg_info,name))
1321        return []
1322
1323    def run_script(self,script_name,namespace):
1324        script = 'scripts/'+script_name
1325        if not self.has_metadata(script):
1326            raise ResolutionError("No script named %r" % script_name)
1327        script_text = self.get_metadata(script).replace('\r\n','\n')
1328        script_text = script_text.replace('\r','\n')
1329        script_filename = self._fn(self.egg_info,script)
1330        namespace['__file__'] = script_filename
1331        if os.path.exists(script_filename):
1332            execfile(script_filename, namespace, namespace)
1333        else:
1334            from linecache import cache
1335            cache[script_filename] = (
1336                len(script_text), 0, script_text.split('\n'), script_filename
1337            )
1338            script_code = compile(script_text,script_filename,'exec')
1339            exec(script_code, namespace, namespace)
1340
1341    def _has(self, path):
1342        raise NotImplementedError(
1343            "Can't perform this operation for unregistered loader type"
1344        )
1345
1346    def _isdir(self, path):
1347        raise NotImplementedError(
1348            "Can't perform this operation for unregistered loader type"
1349        )
1350
1351    def _listdir(self, path):
1352        raise NotImplementedError(
1353            "Can't perform this operation for unregistered loader type"
1354        )
1355
1356    def _fn(self, base, resource_name):
1357        if resource_name:
1358            return os.path.join(base, *resource_name.split('/'))
1359        return base
1360
1361    def _get(self, path):
1362        if hasattr(self.loader, 'get_data'):
1363            return self.loader.get_data(path)
1364        raise NotImplementedError(
1365            "Can't perform this operation for loaders without 'get_data()'"
1366        )
1367
1368register_loader_type(object, NullProvider)
1369
1370
1371class EggProvider(NullProvider):
1372    """Provider based on a virtual filesystem"""
1373
1374    def __init__(self,module):
1375        NullProvider.__init__(self,module)
1376        self._setup_prefix()
1377
1378    def _setup_prefix(self):
1379        # we assume here that our metadata may be nested inside a "basket"
1380        # of multiple eggs; that's why we use module_path instead of .archive
1381        path = self.module_path
1382        old = None
1383        while path!=old:
1384            if path.lower().endswith('.egg'):
1385                self.egg_name = os.path.basename(path)
1386                self.egg_info = os.path.join(path, 'EGG-INFO')
1387                self.egg_root = path
1388                break
1389            old = path
1390            path, base = os.path.split(path)
1391
1392class DefaultProvider(EggProvider):
1393    """Provides access to package resources in the filesystem"""
1394
1395    def _has(self, path):
1396        return os.path.exists(path)
1397
1398    def _isdir(self,path):
1399        return os.path.isdir(path)
1400
1401    def _listdir(self,path):
1402        return os.listdir(path)
1403
1404    def get_resource_stream(self, manager, resource_name):
1405        return open(self._fn(self.module_path, resource_name), 'rb')
1406
1407    def _get(self, path):
1408        stream = open(path, 'rb')
1409        try:
1410            return stream.read()
1411        finally:
1412            stream.close()
1413
1414register_loader_type(type(None), DefaultProvider)
1415
1416if importlib_bootstrap is not None:
1417    register_loader_type(importlib_bootstrap.SourceFileLoader, DefaultProvider)
1418
1419
1420class EmptyProvider(NullProvider):
1421    """Provider that returns nothing for all requests"""
1422
1423    _isdir = _has = lambda self,path: False
1424    _get = lambda self,path: ''
1425    _listdir = lambda self,path: []
1426    module_path = None
1427
1428    def __init__(self):
1429        pass
1430
1431empty_provider = EmptyProvider()
1432
1433
1434def build_zipmanifest(path):
1435    """
    Build a dictionary similar to the zipimport directory caches, except
    that ZipInfo objects are stored instead of tuples.

    The zipimport cache tuples translate as follows:
      * [0] - zipinfo.filename (on stock Pythons this needs "/" --> os.sep;
              on PyPy it is unchanged, which is one reason why distribute
              worked in some cases on PyPy and win32)
      * [1] - zipinfo.compress_type
      * [2] - zipinfo.compress_size
      * [3] - zipinfo.file_size
      * [4] - len(utf-8 encoding of filename) if zipinfo.flag_bits & 0x800,
              len(ascii encoding of filename) otherwise
      * [5] - (zipinfo.date_time[0] - 1980) << 9 |
               zipinfo.date_time[1] << 5 | zipinfo.date_time[2]
      * [6] - zipinfo.date_time[3] << 11 |
               zipinfo.date_time[4] << 5 | (zipinfo.date_time[5] // 2)
      * [7] - zipinfo.CRC
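
    For example (illustrative, using a hypothetical egg path)::

        manifest = build_zipmanifest('/plugins/FooPkg-1.2-py2.7.egg')
        info = manifest[os.path.join('EGG-INFO', 'PKG-INFO')]
        # ``info`` is a zipfile.ZipInfo; see info.file_size, info.date_time, ...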
1453    """
1454    zipinfo = dict()
1455    zfile = zipfile.ZipFile(path)
    # Note: ZipFile has no __exit__ on Python 3.1, so we can't use a with-block
1457    try:
1458        for zitem in zfile.namelist():
1459            zpath = zitem.replace('/', os.sep)
1460            zipinfo[zpath] = zfile.getinfo(zitem)
1461            assert zipinfo[zpath] is not None
1462    finally:
1463        zfile.close()
1464    return zipinfo
1465
1466
1467class ZipProvider(EggProvider):
1468    """Resource support for zips and eggs"""
1469
1470    eagers = None
1471
1472    def __init__(self, module):
1473        EggProvider.__init__(self,module)
1474        self.zipinfo = build_zipmanifest(self.loader.archive)
1475        self.zip_pre = self.loader.archive+os.sep
1476
1477    def _zipinfo_name(self, fspath):
1478        # Convert a virtual filename (full path to file) into a zipfile subpath
1479        # usable with the zipimport directory cache for our target archive
1480        if fspath.startswith(self.zip_pre):
1481            return fspath[len(self.zip_pre):]
1482        raise AssertionError(
1483            "%s is not a subpath of %s" % (fspath,self.zip_pre)
1484        )
1485
1486    def _parts(self,zip_path):
1487        # Convert a zipfile subpath into an egg-relative path part list
1488        fspath = self.zip_pre+zip_path  # pseudo-fs path
1489        if fspath.startswith(self.egg_root+os.sep):
1490            return fspath[len(self.egg_root)+1:].split(os.sep)
1491        raise AssertionError(
1492            "%s is not a subpath of %s" % (fspath,self.egg_root)
1493        )
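
    # For example (illustrative, POSIX paths): with loader.archive and egg_root
    # both '/plugins/Basket.egg', the virtual filename
    # '/plugins/Basket.egg/EGG-INFO/PKG-INFO' maps to the zip subpath
    # 'EGG-INFO/PKG-INFO' via _zipinfo_name(), and _parts() turns that subpath
    # back into the egg-relative parts ['EGG-INFO', 'PKG-INFO'].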
1494
1495    def get_resource_filename(self, manager, resource_name):
1496        if not self.egg_name:
1497            raise NotImplementedError(
1498                "resource_filename() only supported for .egg, not .zip"
1499            )
1500        # no need to lock for extraction, since we use temp names
1501        zip_path = self._resource_to_zip(resource_name)
1502        eagers = self._get_eager_resources()
1503        if '/'.join(self._parts(zip_path)) in eagers:
1504            for name in eagers:
1505                self._extract_resource(manager, self._eager_to_zip(name))
1506        return self._extract_resource(manager, zip_path)
1507
1508    @staticmethod
1509    def _get_date_and_size(zip_stat):
1510        size = zip_stat.file_size
1511        date_time = zip_stat.date_time + (0, 0, -1)  # ymdhms+wday, yday, dst
        # ZipInfo.date_time already gives the real year; no DOS 1980 offset to undo
1513        timestamp = time.mktime(date_time)
1514        return timestamp, size
1515
1516    def _extract_resource(self, manager, zip_path):
1517
1518        if zip_path in self._index():
1519            for name in self._index()[zip_path]:
1520                last = self._extract_resource(
1521                    manager, os.path.join(zip_path, name)
1522                )
1523            return os.path.dirname(last)  # return the extracted directory name
1524
1525        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
1526
1527        if not WRITE_SUPPORT:
1528            raise IOError('"os.rename" and "os.unlink" are not supported '
1529                          'on this platform')
1530        try:
1531
1532            real_path = manager.get_cache_path(
1533                self.egg_name, self._parts(zip_path)
1534            )
1535
1536            if self._is_current(real_path, zip_path):
1537                return real_path
1538
1539            outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path))
1540            os.write(outf, self.loader.get_data(zip_path))
1541            os.close(outf)
1542            utime(tmpnam, (timestamp,timestamp))
1543            manager.postprocess(tmpnam, real_path)
1544
1545            try:
1546                rename(tmpnam, real_path)
1547
1548            except os.error:
1549                if os.path.isfile(real_path):
1550                    if self._is_current(real_path, zip_path):
1551                        # the file became current since it was checked above,
1552                        #  so proceed.
1553                        return real_path
1554                    elif os.name=='nt':     # Windows, del old file and retry
1555                        unlink(real_path)
1556                        rename(tmpnam, real_path)
1557                        return real_path
1558                raise
1559
1560        except os.error:
1561            manager.extraction_error()  # report a user-friendly error
1562
1563        return real_path
1564
1565    def _is_current(self, file_path, zip_path):
1566        """
1567        Return True if the file_path is current for this zip_path
1568        """
1569        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
1570        if not os.path.isfile(file_path):
1571            return False
1572        stat = os.stat(file_path)
1573        if stat.st_size!=size or stat.st_mtime!=timestamp:
1574            return False
1575        # check that the contents match
1576        zip_contents = self.loader.get_data(zip_path)
1577        f = open(file_path, 'rb')
1578        file_contents = f.read()
1579        f.close()
1580        return zip_contents == file_contents
1581
1582    def _get_eager_resources(self):
1583        if self.eagers is None:
1584            eagers = []
1585            for name in ('native_libs.txt', 'eager_resources.txt'):
1586                if self.has_metadata(name):
1587                    eagers.extend(self.get_metadata_lines(name))
1588            self.eagers = eagers
1589        return self.eagers
1590
1591    def _index(self):
1592        try:
1593            return self._dirindex
1594        except AttributeError:
1595            ind = {}
1596            for path in self.zipinfo:
1597                parts = path.split(os.sep)
1598                while parts:
1599                    parent = os.sep.join(parts[:-1])
1600                    if parent in ind:
1601                        ind[parent].append(parts[-1])
1602                        break
1603                    else:
1604                        ind[parent] = [parts.pop()]
1605            self._dirindex = ind
1606            return ind
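
    # For example (illustrative): if self.zipinfo has keys 'EGG-INFO/PKG-INFO'
    # and 'foo/bar.py' (with '/' standing in for os.sep), the index maps each
    # directory to its immediate children:
    #   {'': ['EGG-INFO', 'foo'], 'EGG-INFO': ['PKG-INFO'], 'foo': ['bar.py']}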
1607
1608    def _has(self, fspath):
1609        zip_path = self._zipinfo_name(fspath)
1610        return zip_path in self.zipinfo or zip_path in self._index()
1611
1612    def _isdir(self,fspath):
1613        return self._zipinfo_name(fspath) in self._index()
1614
1615    def _listdir(self,fspath):
1616        return list(self._index().get(self._zipinfo_name(fspath), ()))
1617
1618    def _eager_to_zip(self,resource_name):
1619        return self._zipinfo_name(self._fn(self.egg_root,resource_name))
1620
1621    def _resource_to_zip(self,resource_name):
1622        return self._zipinfo_name(self._fn(self.module_path,resource_name))
1623
1624register_loader_type(zipimport.zipimporter, ZipProvider)
1625
1626
1627class FileMetadata(EmptyProvider):
1628    """Metadata handler for standalone PKG-INFO files
1629
1630    Usage::
1631
1632        metadata = FileMetadata("/path/to/PKG-INFO")
1633
    This provider rejects all data and metadata requests except for PKG-INFO,
    which is treated as existing and whose content is read from the file at
    the provided location.
1637    """
1638
1639    def __init__(self,path):
1640        self.path = path
1641
1642    def has_metadata(self,name):
1643        return name=='PKG-INFO'
1644
1645    def get_metadata(self,name):
1646        if name=='PKG-INFO':
1647            f = open(self.path,'rU')
1648            metadata = f.read()
1649            f.close()
1650            return metadata
1651        raise KeyError("No metadata except PKG-INFO is available")
1652
1653    def get_metadata_lines(self,name):
1654        return yield_lines(self.get_metadata(name))
1655
1656
1657class PathMetadata(DefaultProvider):
1658    """Metadata provider for egg directories
1659
1660    Usage::
1661
1662        # Development eggs:
1663
1664        egg_info = "/path/to/PackageName.egg-info"
1665        base_dir = os.path.dirname(egg_info)
1666        metadata = PathMetadata(base_dir, egg_info)
1667        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
        dist = Distribution(base_dir, project_name=dist_name, metadata=metadata)
1669
1670        # Unpacked egg directories:
1671
1672        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
1673        metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
1674        dist = Distribution.from_filename(egg_path, metadata=metadata)
1675    """
1676
1677    def __init__(self, path, egg_info):
1678        self.module_path = path
1679        self.egg_info = egg_info
1680
1681
1682class EggMetadata(ZipProvider):
1683    """Metadata provider for .egg files"""
1684
1685    def __init__(self, importer):
1686        """Create a metadata provider from a zipimporter"""
1687
1688        self.zipinfo = build_zipmanifest(importer.archive)
1689        self.zip_pre = importer.archive+os.sep
1690        self.loader = importer
1691        if importer.prefix:
1692            self.module_path = os.path.join(importer.archive, importer.prefix)
1693        else:
1694            self.module_path = importer.archive
1695        self._setup_prefix()
1696
1697_declare_state('dict', _distribution_finders = {})
1698
1699def register_finder(importer_type, distribution_finder):
1700    """Register `distribution_finder` to find distributions in sys.path items
1701
1702    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
1703    handler), and `distribution_finder` is a callable that, passed a path
1704    item and the importer instance, yields ``Distribution`` instances found on
1705    that path item.  See ``pkg_resources.find_on_path`` for an example."""
1706    _distribution_finders[importer_type] = distribution_finder
1707
1708
1709def find_distributions(path_item, only=False):
1710    """Yield distributions accessible via `path_item`"""
1711    importer = get_importer(path_item)
1712    finder = _find_adapter(_distribution_finders, importer)
1713    return finder(importer, path_item, only)
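
# For example (illustrative, with a hypothetical directory name):
#   dists = list(find_distributions('/plugins'))
# Each item is a Distribution exposing .project_name, .version, .location, etc.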
1714
1715def find_in_zip(importer, path_item, only=False):
1716    metadata = EggMetadata(importer)
1717    if metadata.has_metadata('PKG-INFO'):
1718        yield Distribution.from_filename(path_item, metadata=metadata)
1719    if only:
1720        return  # don't yield nested distros
1721    for subitem in metadata.resource_listdir('/'):
1722        if subitem.endswith('.egg'):
1723            subpath = os.path.join(path_item, subitem)
1724            for dist in find_in_zip(zipimport.zipimporter(subpath), subpath):
1725                yield dist
1726
1727register_finder(zipimport.zipimporter, find_in_zip)
1728
1729def find_nothing(importer, path_item, only=False):
1730    return ()
1731register_finder(object,find_nothing)
1732
1733def find_on_path(importer, path_item, only=False):
1734    """Yield distributions accessible on a sys.path directory"""
1735    path_item = _normalize_cached(path_item)
1736
1737    if os.path.isdir(path_item) and os.access(path_item, os.R_OK):
1738        if path_item.lower().endswith('.egg'):
1739            # unpacked egg
1740            yield Distribution.from_filename(
1741                path_item, metadata=PathMetadata(
1742                    path_item, os.path.join(path_item,'EGG-INFO')
1743                )
1744            )
1745        else:
1746            # scan for .egg and .egg-info in directory
1747            for entry in os.listdir(path_item):
1748                lower = entry.lower()
1749                if lower.endswith('.egg-info') or lower.endswith('.dist-info'):
1750                    fullpath = os.path.join(path_item, entry)
1751                    if os.path.isdir(fullpath):
1752                        # egg-info directory, allow getting metadata
1753                        metadata = PathMetadata(path_item, fullpath)
1754                    else:
1755                        metadata = FileMetadata(fullpath)
1756                    yield Distribution.from_location(
1757                        path_item,entry,metadata,precedence=DEVELOP_DIST
1758                    )
1759                elif not only and lower.endswith('.egg'):
1760                    for dist in find_distributions(os.path.join(path_item, entry)):
1761                        yield dist
1762                elif not only and lower.endswith('.egg-link'):
1763                    entry_file = open(os.path.join(path_item, entry))
1764                    try:
1765                        entry_lines = entry_file.readlines()
1766                    finally:
1767                        entry_file.close()
1768                    for line in entry_lines:
1769                        if not line.strip(): continue
1770                        for item in find_distributions(os.path.join(path_item,line.rstrip())):
1771                            yield item
1772                        break
1773register_finder(pkgutil.ImpImporter,find_on_path)
1774
1775if importlib_bootstrap is not None:
1776    register_finder(importlib_bootstrap.FileFinder, find_on_path)
1777
1778_declare_state('dict', _namespace_handlers={})
1779_declare_state('dict', _namespace_packages={})
1780
1781
1782def register_namespace_handler(importer_type, namespace_handler):
1783    """Register `namespace_handler` to declare namespace packages
1784
1785    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
1786    handler), and `namespace_handler` is a callable like this::
1787
1788        def namespace_handler(importer,path_entry,moduleName,module):
1789            # return a path_entry to use for child packages
1790
1791    Namespace handlers are only called if the importer object has already
1792    agreed that it can handle the relevant path item, and they should only
1793    return a subpath if the module __path__ does not already contain an
1794    equivalent subpath.  For an example namespace handler, see
1795    ``pkg_resources.file_ns_handler``.
1796    """
1797    _namespace_handlers[importer_type] = namespace_handler
1798
1799def _handle_ns(packageName, path_item):
1800    """Ensure that named package includes a subpath of path_item (if needed)"""
1801    importer = get_importer(path_item)
1802    if importer is None:
1803        return None
1804    loader = importer.find_module(packageName)
1805    if loader is None:
1806        return None
1807    module = sys.modules.get(packageName)
1808    if module is None:
1809        module = sys.modules[packageName] = imp.new_module(packageName)
1810        module.__path__ = []
1811        _set_parent_ns(packageName)
1812    elif not hasattr(module,'__path__'):
1813        raise TypeError("Not a package:", packageName)
1814    handler = _find_adapter(_namespace_handlers, importer)
1815    subpath = handler(importer,path_item,packageName,module)
1816    if subpath is not None:
1817        path = module.__path__
1818        path.append(subpath)
1819        loader.load_module(packageName)
1820        module.__path__ = path
1821    return subpath
1822
1823def declare_namespace(packageName):
1824    """Declare that package 'packageName' is a namespace package"""
1825
1826    imp.acquire_lock()
1827    try:
1828        if packageName in _namespace_packages:
1829            return
1830
1831        path, parent = sys.path, None
1832        if '.' in packageName:
1833            parent = '.'.join(packageName.split('.')[:-1])
1834            declare_namespace(parent)
1835            if parent not in _namespace_packages:
1836                __import__(parent)
1837            try:
1838                path = sys.modules[parent].__path__
1839            except AttributeError:
1840                raise TypeError("Not a package:", parent)
1841
1842        # Track what packages are namespaces, so when new path items are added,
1843        # they can be updated
1844        _namespace_packages.setdefault(parent,[]).append(packageName)
1845        _namespace_packages.setdefault(packageName,[])
1846
1847        for path_item in path:
1848            # Ensure all the parent's path items are reflected in the child,
1849            # if they apply
1850            _handle_ns(packageName, path_item)
1851
1852    finally:
1853        imp.release_lock()
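
# A namespace package's __init__.py conventionally contains a call such as
# (illustrative):
#   __import__('pkg_resources').declare_namespace(__name__)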
1854
1855def fixup_namespace_packages(path_item, parent=None):
1856    """Ensure that previously-declared namespace packages include path_item"""
1857    imp.acquire_lock()
1858    try:
1859        for package in _namespace_packages.get(parent,()):
1860            subpath = _handle_ns(package, path_item)
1861            if subpath: fixup_namespace_packages(subpath,package)
1862    finally:
1863        imp.release_lock()
1864
1865def file_ns_handler(importer, path_item, packageName, module):
1866    """Compute an ns-package subpath for a filesystem or zipfile importer"""
1867
1868    subpath = os.path.join(path_item, packageName.split('.')[-1])
1869    normalized = _normalize_cached(subpath)
1870    for item in module.__path__:
1871        if _normalize_cached(item)==normalized:
1872            break
1873    else:
1874        # Only return the path if it's not already there
1875        return subpath
1876
1877register_namespace_handler(pkgutil.ImpImporter,file_ns_handler)
1878register_namespace_handler(zipimport.zipimporter,file_ns_handler)
1879
1880if importlib_bootstrap is not None:
1881    register_namespace_handler(importlib_bootstrap.FileFinder, file_ns_handler)
1882
1883
1884def null_ns_handler(importer, path_item, packageName, module):
1885    return None
1886
1887register_namespace_handler(object,null_ns_handler)
1888
1889
1890def normalize_path(filename):
1891    """Normalize a file/dir name for comparison purposes"""
1892    return os.path.normcase(os.path.realpath(filename))
1893
1894def _normalize_cached(filename,_cache={}):
1895    try:
1896        return _cache[filename]
1897    except KeyError:
1898        _cache[filename] = result = normalize_path(filename)
1899        return result
1900
1901def _set_parent_ns(packageName):
1902    parts = packageName.split('.')
1903    name = parts.pop()
1904    if parts:
1905        parent = '.'.join(parts)
1906        setattr(sys.modules[parent], name, sys.modules[packageName])
1907
1908
1909def yield_lines(strs):
1910    """Yield non-empty/non-comment lines of a ``basestring`` or sequence"""
1911    if isinstance(strs,basestring):
1912        for s in strs.splitlines():
1913            s = s.strip()
1914            if s and not s.startswith('#'):     # skip blank lines/comments
1915                yield s
1916    else:
1917        for ss in strs:
1918            for s in yield_lines(ss):
1919                yield s
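
# For example (illustrative), both calls below yield 'a' and then 'b',
# skipping the blank and comment-only lines:
#   list(yield_lines('  a \n\n# note\nb'))
#   list(yield_lines(['  a ', '', '# note', 'b']))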
1920
1921LINE_END = re.compile(r"\s*(#.*)?$").match         # whitespace and comment
1922CONTINUE = re.compile(r"\s*\\\s*(#.*)?$").match    # line continuation
1923DISTRO = re.compile(r"\s*((\w|[-.])+)").match    # Distribution or extra
1924VERSION = re.compile(r"\s*(<=?|>=?|==|!=)\s*((\w|[-.])+)").match  # ver. info
1925COMMA = re.compile(r"\s*,").match               # comma between items
1926OBRACKET = re.compile(r"\s*\[").match
1927CBRACKET = re.compile(r"\s*\]").match
1928MODULE = re.compile(r"\w+(\.\w+)*$").match
1929EGG_NAME = re.compile(
1930    r"(?P<name>[^-]+)"
1931    r"( -(?P<ver>[^-]+) (-py(?P<pyver>[^-]+) (-(?P<plat>.+))? )? )?",
1932    re.VERBOSE | re.IGNORECASE
1933).match
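
# For example (illustrative), EGG_NAME('FooPkg-1.2-py2.7-linux-x86_64') captures
# name='FooPkg', ver='1.2', pyver='2.7' and plat='linux-x86_64'; everything
# after the name is optional.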
1934
1935component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
1936replace = {'pre':'c', 'preview':'c','-':'final-','rc':'c','dev':'@'}.get
1937
1938def _parse_version_parts(s):
1939    for part in component_re.split(s):
1940        part = replace(part,part)
1941        if not part or part=='.':
1942            continue
1943        if part[:1] in '0123456789':
1944            yield part.zfill(8)    # pad for numeric comparison
1945        else:
1946            yield '*'+part
1947
1948    yield '*final'  # ensure that alpha/beta/candidate are before final
1949
1950def parse_version(s):
1951    """Convert a version string to a chronologically-sortable key
1952
1953    This is a rough cross between distutils' StrictVersion and LooseVersion;
1954    if you give it versions that would work with StrictVersion, then it behaves
1955    the same; otherwise it acts like a slightly-smarter LooseVersion. It is
1956    *possible* to create pathological version coding schemes that will fool
1957    this parser, but they should be very rare in practice.
1958
1959    The returned value will be a tuple of strings.  Numeric portions of the
1960    version are padded to 8 digits so they will compare numerically, but
1961    without relying on how numbers compare relative to strings.  Dots are
1962    dropped, but dashes are retained.  Trailing zeros between alpha segments
1963    or dashes are suppressed, so that e.g. "2.4.0" is considered the same as
1964    "2.4". Alphanumeric parts are lower-cased.
1965
1966    The algorithm assumes that strings like "-" and any alpha string that
    alphabetically follows "final" represent a "patch level".  So, "2.4-1"
1968    is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is
1969    considered newer than "2.4-1", which in turn is newer than "2.4".
1970
1971    Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that
1972    come before "final" alphabetically) are assumed to be pre-release versions,
1973    so that the version "2.4" is considered newer than "2.4a1".
1974
1975    Finally, to handle miscellaneous cases, the strings "pre", "preview", and
1976    "rc" are treated as if they were "c", i.e. as though they were release
1977    candidates, and therefore are not as new as a version string that does not
    contain them, and "dev" is replaced with an '@' so that it sorts lower
    than any other pre-release tag.
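
    For example (illustrative), the resulting keys sort as described above::

        parse_version('2.4a1') < parse_version('2.4')      # True
        parse_version('2.4') < parse_version('2.4-1')      # True
        parse_version('2.4-1') < parse_version('2.4.1')    # True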
1980    """
1981    parts = []
1982    for part in _parse_version_parts(s.lower()):
1983        if part.startswith('*'):
1984            if part<'*final':   # remove '-' before a prerelease tag
1985                while parts and parts[-1]=='*final-': parts.pop()
1986            # remove trailing zeros from each series of numeric parts
1987            while parts and parts[-1]=='00000000':
1988                parts.pop()
1989        parts.append(part)
1990    return tuple(parts)

class EntryPoint(object):
1992    """Object representing an advertised importable object"""
1993
1994    def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
1995        if not MODULE(module_name):
1996            raise ValueError("Invalid module name", module_name)
1997        self.name = name
1998        self.module_name = module_name
1999        self.attrs = tuple(attrs)
2000        self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras
2001        self.dist = dist
2002
2003    def __str__(self):
2004        s = "%s = %s" % (self.name, self.module_name)
2005        if self.attrs:
2006            s += ':' + '.'.join(self.attrs)
2007        if self.extras:
2008            s += ' [%s]' % ','.join(self.extras)
2009        return s
2010
2011    def __repr__(self):
2012        return "EntryPoint.parse(%r)" % str(self)
2013
2014    def load(self, require=True, env=None, installer=None):
2015        if require: self.require(env, installer)
2016        entry = __import__(self.module_name, globals(),globals(), ['__name__'])
2017        for attr in self.attrs:
2018            try:
2019                entry = getattr(entry,attr)
2020            except AttributeError:
2021                raise ImportError("%r has no %r attribute" % (entry,attr))
2022        return entry
2023
2024    def require(self, env=None, installer=None):
2025        if self.extras and not self.dist:
2026            raise UnknownExtra("Can't require() without a distribution", self)
2027        list(map(working_set.add,
2028            working_set.resolve(self.dist.requires(self.extras),env,installer)))
2029
2030    #@classmethod
2031    def parse(cls, src, dist=None):
2032        """Parse a single entry point from string `src`
2033
2034        Entry point syntax follows the form::
2035
2036            name = some.module:some.attr [extra1,extra2]
2037
2038        The entry name and module name are required, but the ``:attrs`` and
        ``[extras]`` parts are optional.
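
        For example (illustrative, with hypothetical names)::

            ep = EntryPoint.parse('bar = foo.module:Bar.factory [extra1]')
            # ep.name == 'bar', ep.module_name == 'foo.module',
            # ep.attrs == ('Bar', 'factory'), ep.extras == ('extra1',)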
2040        """
2041        try:
2042            attrs = extras = ()
2043            name,value = src.split('=',1)
2044            if '[' in value:
2045                value,extras = value.split('[',1)
2046                req = Requirement.parse("x["+extras)
2047                if req.specs: raise ValueError
2048                extras = req.extras
2049            if ':' in value:
2050                value,attrs = value.split(':',1)
2051                if not MODULE(attrs.rstrip()):
2052                    raise ValueError
2053                attrs = attrs.rstrip().split('.')
2054        except ValueError:
2055            raise ValueError(
2056                "EntryPoint must be in 'name=module:attrs [extras]' format",
2057                src
2058            )
2059        else:
2060            return cls(name.strip(), value.strip(), attrs, extras, dist)
2061
2062    parse = classmethod(parse)
2063
2064    #@classmethod
2065    def parse_group(cls, group, lines, dist=None):
2066        """Parse an entry point group"""
2067        if not MODULE(group):
2068            raise ValueError("Invalid group name", group)
2069        this = {}
2070        for line in yield_lines(lines):
2071            ep = cls.parse(line, dist)
2072            if ep.name in this:
2073                raise ValueError("Duplicate entry point", group, ep.name)
2074            this[ep.name]=ep
2075        return this
2076
2077    parse_group = classmethod(parse_group)
2078
2079    #@classmethod
2080    def parse_map(cls, data, dist=None):
2081        """Parse a map of entry point groups"""
2082        if isinstance(data,dict):
2083            data = data.items()
2084        else:
2085            data = split_sections(data)
2086        maps = {}
2087        for group, lines in data:
2088            if group is None:
2089                if not lines:
2090                    continue
2091                raise ValueError("Entry points must be listed in groups")
2092            group = group.strip()
2093            if group in maps:
2094                raise ValueError("Duplicate group name", group)
2095            maps[group] = cls.parse_group(group, lines, dist)
2096        return maps
2097
2098    parse_map = classmethod(parse_map)
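
    # For example (illustrative), parse_map() accepts data shaped like the
    # contents of an entry_points.txt file:
    #
    #   [console_scripts]
    #   mytool = mypkg.cli:main
    #
    # and returns {'console_scripts': {'mytool': EntryPoint(...)}}.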
2099
2100
2101def _remove_md5_fragment(location):
2102    if not location:
2103        return ''
2104    parsed = urlparse(location)
2105    if parsed[-1].startswith('md5='):
2106        return urlunparse(parsed[:-1] + ('',))
2107    return location
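
# For example (illustrative, with a hypothetical URL), a location of
# 'http://example.com/FooPkg-1.2.egg#md5=0123abcd' comes back as
# 'http://example.com/FooPkg-1.2.egg'.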
2108
2109
2110class Distribution(object):
2111    """Wrap an actual or potential sys.path entry w/metadata"""
2112    PKG_INFO = 'PKG-INFO'
2113
2114    def __init__(self, location=None, metadata=None, project_name=None,
2115            version=None, py_version=PY_MAJOR, platform=None,
2116            precedence=EGG_DIST):
2117        self.project_name = safe_name(project_name or 'Unknown')
2118        if version is not None:
2119            self._version = safe_version(version)
2120        self.py_version = py_version
2121        self.platform = platform
2122        self.location = location
2123        self.precedence = precedence
2124        self._provider = metadata or empty_provider
2125
2126    #@classmethod
2127    def from_location(cls,location,basename,metadata=None,**kw):
2128        project_name, version, py_version, platform = [None]*4
2129        basename, ext = os.path.splitext(basename)
2130        if ext.lower() in _distributionImpl:
            # .dist-info distributions obtain much of their metadata differently
2132            match = EGG_NAME(basename)
2133            if match:
2134                project_name, version, py_version, platform = match.group(
2135                    'name','ver','pyver','plat'
2136                )
2137            cls = _distributionImpl[ext.lower()]
2138        return cls(
2139            location, metadata, project_name=project_name, version=version,
2140            py_version=py_version, platform=platform, **kw
2141        )
2142    from_location = classmethod(from_location)
2143
2144    hashcmp = property(
2145        lambda self: (
2146            getattr(self,'parsed_version',()),
2147            self.precedence,
2148            self.key,
2149            _remove_md5_fragment(self.location),
2150            self.py_version,
2151            self.platform
2152        )
2153    )
2154    def __hash__(self): return hash(self.hashcmp)
2155    def __lt__(self, other):
2156        return self.hashcmp < other.hashcmp
2157    def __le__(self, other):
2158        return self.hashcmp <= other.hashcmp
2159    def __gt__(self, other):
2160        return self.hashcmp > other.hashcmp
2161    def __ge__(self, other):
2162        return self.hashcmp >= other.hashcmp
2163    def __eq__(self, other):
2164        if not isinstance(other, self.__class__):
2165            # It's not a Distribution, so they are not equal
2166            return False
2167        return self.hashcmp == other.hashcmp
2168    def __ne__(self, other):
2169        return not self == other
2170
2171    # These properties have to be lazy so that we don't have to load any
2172    # metadata until/unless it's actually needed.  (i.e., some distributions
2173    # may not know their name or version without loading PKG-INFO)
2174
2175    #@property
2176    def key(self):
2177        try:
2178            return self._key
2179        except AttributeError:
2180            self._key = key = self.project_name.lower()
2181            return key
2182    key = property(key)
2183
2184    #@property
2185    def parsed_version(self):
2186        try:
2187            return self._parsed_version
2188        except AttributeError:
2189            self._parsed_version = pv = parse_version(self.version)
2190            return pv
2191
2192    parsed_version = property(parsed_version)
2193
2194    #@property
2195    def version(self):
2196        try:
2197            return self._version
2198        except AttributeError:
2199            for line in self._get_metadata(self.PKG_INFO):
2200                if line.lower().startswith('version:'):
2201                    self._version = safe_version(line.split(':',1)[1].strip())
2202                    return self._version
2203            else:
2204                raise ValueError(
2205                    "Missing 'Version:' header and/or %s file" % self.PKG_INFO, self
2206                )
2207    version = property(version)
2208
2209    #@property
2210    def _dep_map(self):
2211        try:
2212            return self.__dep_map
2213        except AttributeError:
2214            dm = self.__dep_map = {None: []}
2215            for name in 'requires.txt', 'depends.txt':
2216                for extra,reqs in split_sections(self._get_metadata(name)):
2217                    if extra:
2218                        if ':' in extra:
2219                            extra, marker = extra.split(':',1)
2220                            if invalid_marker(marker):
2221                                reqs=[] # XXX warn
2222                            elif not evaluate_marker(marker):
2223                                reqs=[]
2224                        extra = safe_extra(extra) or None
2225                    dm.setdefault(extra,[]).extend(parse_requirements(reqs))
2226            return dm
2227    _dep_map = property(_dep_map)
2228
2229    def requires(self,extras=()):
2230        """List of Requirements needed for this distro if `extras` are used"""
2231        dm = self._dep_map
2232        deps = []
2233        deps.extend(dm.get(None,()))
2234        for ext in extras:
2235            try:
2236                deps.extend(dm[safe_extra(ext)])
2237            except KeyError:
2238                raise UnknownExtra(
2239                    "%s has no such extra feature %r" % (self, ext)
2240                )
2241        return deps
2242
2243    def _get_metadata(self,name):
2244        if self.has_metadata(name):
2245            for line in self.get_metadata_lines(name):
2246                yield line
2247
2248    def activate(self,path=None):
2249        """Ensure distribution is importable on `path` (default=sys.path)"""
2250        if path is None: path = sys.path
2251        self.insert_on(path)
2252        if path is sys.path:
2253            fixup_namespace_packages(self.location)
2254            list(map(declare_namespace, self._get_metadata('namespace_packages.txt')))
2255
2256    def egg_name(self):
2257        """Return what this distribution's standard .egg filename should be"""
2258        filename = "%s-%s-py%s" % (
2259            to_filename(self.project_name), to_filename(self.version),
2260            self.py_version or PY_MAJOR
2261        )
2262
2263        if self.platform:
2264            filename += '-'+self.platform
2265        return filename
2266
2267    def __repr__(self):
2268        if self.location:
2269            return "%s (%s)" % (self,self.location)
2270        else:
2271            return str(self)
2272
2273    def __str__(self):
2274        try: version = getattr(self,'version',None)
2275        except ValueError: version = None
2276        version = version or "[unknown version]"
2277        return "%s %s" % (self.project_name,version)
2278
2279    def __getattr__(self,attr):
2280        """Delegate all unrecognized public attributes to .metadata provider"""
2281        if attr.startswith('_'):
2282            raise AttributeError(attr)
2283        return getattr(self._provider, attr)
2284
2285    #@classmethod
2286    def from_filename(cls,filename,metadata=None, **kw):
2287        return cls.from_location(
2288            _normalize_cached(filename), os.path.basename(filename), metadata,
2289            **kw
2290        )
2291    from_filename = classmethod(from_filename)
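
    # For example (illustrative, with a hypothetical filename):
    #   dist = Distribution.from_filename('FooPkg-1.2-py2.7.egg')
    # gives dist.project_name == 'FooPkg', dist.version == '1.2' and
    # dist.py_version == '2.7'.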
2292
2293    def as_requirement(self):
2294        """Return a ``Requirement`` that matches this distribution exactly"""
2295        return Requirement.parse('%s==%s' % (self.project_name, self.version))
2296
2297    def load_entry_point(self, group, name):
2298        """Return the `name` entry point of `group` or raise ImportError"""
2299        ep = self.get_entry_info(group,name)
2300        if ep is None:
2301            raise ImportError("Entry point %r not found" % ((group,name),))
2302        return ep.load()
2303
2304    def get_entry_map(self, group=None):
2305        """Return the entry point map for `group`, or the full entry map"""
2306        try:
2307            ep_map = self._ep_map
2308        except AttributeError:
2309            ep_map = self._ep_map = EntryPoint.parse_map(
2310                self._get_metadata('entry_points.txt'), self
2311            )
2312        if group is not None:
2313            return ep_map.get(group,{})
2314        return ep_map
2315
2316    def get_entry_info(self, group, name):
2317        """Return the EntryPoint object for `group`+`name`, or ``None``"""
2318        return self.get_entry_map(group).get(name)
2319
2320    def insert_on(self, path, loc = None):
2321        """Insert self.location in path before its nearest parent directory"""
2322
2323        loc = loc or self.location
2324        if not loc:
2325            return
2326
2327        nloc = _normalize_cached(loc)
2328        bdir = os.path.dirname(nloc)
2329        npath= [(p and _normalize_cached(p) or p) for p in path]
2330
2331        for p, item in enumerate(npath):
2332            if item==nloc:
2333                break
2334            elif item==bdir and self.precedence==EGG_DIST:
2335                # if it's an .egg, give it precedence over its directory
2336                if path is sys.path:
2337                    self.check_version_conflict()
2338                path.insert(p, loc)
2339                npath.insert(p, nloc)
2340                break
2341        else:
2342            if path is sys.path:
2343                self.check_version_conflict()
2344            path.append(loc)
2345            return
2346
2347        # p is the spot where we found or inserted loc; now remove duplicates
2348        while 1:
2349            try:
2350                np = npath.index(nloc, p+1)
2351            except ValueError:
2352                break
2353            else:
2354                del npath[np], path[np]
                p = np  # continue scanning after the removed duplicate
2356
2357        return
2358
2359    def check_version_conflict(self):
2360        if self.key=='setuptools':
2361            return      # ignore the inevitable setuptools self-conflicts  :(
2362
2363        nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
2364        loc = normalize_path(self.location)
2365        for modname in self._get_metadata('top_level.txt'):
2366            if (modname not in sys.modules or modname in nsp
2367                    or modname in _namespace_packages):
2368                continue
2369            if modname in ('pkg_resources', 'setuptools', 'site'):
2370                continue
2371            fn = getattr(sys.modules[modname], '__file__', None)
2372            if fn and (normalize_path(fn).startswith(loc) or
2373                       fn.startswith(self.location)):
2374                continue
2375            issue_warning(
2376                "Module %s was already imported from %s, but %s is being added"
2377                " to sys.path" % (modname, fn, self.location),
2378            )
2379
2380    def has_version(self):
2381        try:
2382            self.version
2383        except ValueError:
2384            issue_warning("Unbuilt egg for "+repr(self))
2385            return False
2386        return True
2387
2388    def clone(self,**kw):
2389        """Copy this distribution, substituting in any changed keyword args"""
2390        for attr in (
2391            'project_name', 'version', 'py_version', 'platform', 'location',
2392            'precedence'
2393        ):
2394            kw.setdefault(attr, getattr(self,attr,None))
2395        kw.setdefault('metadata', self._provider)
2396        return self.__class__(**kw)
2397
2398    #@property
2399    def extras(self):
2400        return [dep for dep in self._dep_map if dep]
2401    extras = property(extras)
2402
2403
2404class DistInfoDistribution(Distribution):
2405    """Wrap an actual or potential sys.path entry w/metadata, .dist-info style"""
2406    PKG_INFO = 'METADATA'
2407    EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])")
2408
2409    @property
2410    def _parsed_pkg_info(self):
2411        """Parse and cache metadata"""
2412        try:
2413            return self._pkg_info
2414        except AttributeError:
2415            from email.parser import Parser
2416            self._pkg_info = Parser().parsestr(self.get_metadata(self.PKG_INFO))
2417            return self._pkg_info
2418
2419    @property
2420    def _dep_map(self):
2421        try:
2422            return self.__dep_map
2423        except AttributeError:
2424            self.__dep_map = self._compute_dependencies()
2425            return self.__dep_map
2426
2427    def _preparse_requirement(self, requires_dist):
2428        """Convert 'Foobar (1); baz' to ('Foobar ==1', 'baz')
        Split off the environment marker, add an == prefix to bare version
        specifiers where necessary, and remove the parentheses.
2431        """
2432        parts = requires_dist.split(';', 1) + ['']
2433        distvers = parts[0].strip()
2434        mark = parts[1].strip()
2435        distvers = re.sub(self.EQEQ, r"\1==\2\3", distvers)
2436        distvers = distvers.replace('(', '').replace(')', '')
2437        return (distvers, mark)
2438
2439    def _compute_dependencies(self):
2440        """Recompute this distribution's dependencies."""
2441        from _markerlib import compile as compile_marker
2442        dm = self.__dep_map = {None: []}
2443
2444        reqs = []
2445        # Including any condition expressions
2446        for req in self._parsed_pkg_info.get_all('Requires-Dist') or []:
2447            distvers, mark = self._preparse_requirement(req)
2448            parsed = next(parse_requirements(distvers))
2449            parsed.marker_fn = compile_marker(mark)
2450            reqs.append(parsed)
2451
2452        def reqs_for_extra(extra):
2453            for req in reqs:
2454                if req.marker_fn(override={'extra':extra}):
2455                    yield req
2456
2457        common = frozenset(reqs_for_extra(None))
2458        dm[None].extend(common)
2459
2460        for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:
2461            extra = safe_extra(extra.strip())
2462            dm[extra] = list(frozenset(reqs_for_extra(extra)) - common)
2463
2464        return dm
2465
2466
2467_distributionImpl = {
2468    '.egg': Distribution,
2469    '.egg-info': Distribution,
2470    '.dist-info': DistInfoDistribution,
2471    }
2472
2473
2474def issue_warning(*args,**kw):
2475    level = 1
2476    g = globals()
2477    try:
2478        # find the first stack frame that is *not* code in
2479        # the pkg_resources module, to use for the warning
2480        while sys._getframe(level).f_globals is g:
2481            level += 1
2482    except ValueError:
2483        pass
2484    from warnings import warn
2485    warn(stacklevel = level+1, *args, **kw)
2486
2487
2488def parse_requirements(strs):
2489    """Yield ``Requirement`` objects for each specification in `strs`
2490
2491    `strs` must be an instance of ``basestring``, or a (possibly-nested)
2492    iterable thereof.
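
    For example (illustrative, with a hypothetical project name)::

        [req] = parse_requirements("FooPkg[quux]>=1.2,<2.0")
        # req.project_name == 'FooPkg'
        # req.specs == [('>=', '1.2'), ('<', '2.0')]
        # req.extras == ('quux',)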
2493    """
2494    # create a steppable iterator, so we can handle \-continuations
2495    lines = iter(yield_lines(strs))
2496
2497    def scan_list(ITEM,TERMINATOR,line,p,groups,item_name):
2498
2499        items = []
2500
2501        while not TERMINATOR(line,p):
2502            if CONTINUE(line,p):
2503                try:
2504                    line = next(lines)
2505                    p = 0
2506                except StopIteration:
2507                    raise ValueError(
2508                        "\\ must not appear on the last nonblank line"
2509                    )
2510
2511            match = ITEM(line,p)
2512            if not match:
2513                raise ValueError("Expected "+item_name+" in",line,"at",line[p:])
2514
2515            items.append(match.group(*groups))
2516            p = match.end()
2517
2518            match = COMMA(line,p)
2519            if match:
2520                p = match.end() # skip the comma
2521            elif not TERMINATOR(line,p):
2522                raise ValueError(
2523                    "Expected ',' or end-of-list in",line,"at",line[p:]
2524                )
2525
2526        match = TERMINATOR(line,p)
2527        if match: p = match.end()   # skip the terminator, if any
2528        return line, p, items
2529
2530    for line in lines:
2531        match = DISTRO(line)
2532        if not match:
2533            raise ValueError("Missing distribution spec", line)
2534        project_name = match.group(1)
2535        p = match.end()
2536        extras = []
2537
2538        match = OBRACKET(line,p)
2539        if match:
2540            p = match.end()
2541            line, p, extras = scan_list(
2542                DISTRO, CBRACKET, line, p, (1,), "'extra' name"
2543            )
2544
2545        line, p, specs = scan_list(VERSION,LINE_END,line,p,(1,2),"version spec")
2546        specs = [(op,safe_version(val)) for op,val in specs]
2547        yield Requirement(project_name, specs, extras)
2548
2549
2550def _sort_dists(dists):
2551    tmp = [(dist.hashcmp,dist) for dist in dists]
2552    tmp.sort()
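    # store in reverse order so the best match (highest version) comes first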
2553    dists[::-1] = [d for hc,d in tmp]
2554
2555
2556class Requirement:
2557    def __init__(self, project_name, specs, extras):
2558        """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
2559        self.unsafe_name, project_name = project_name, safe_name(project_name)
2560        self.project_name, self.key = project_name, project_name.lower()
2561        index = [(parse_version(v),state_machine[op],op,v) for op,v in specs]
2562        index.sort()
2563        self.specs = [(op,ver) for parsed,trans,op,ver in index]
2564        self.index, self.extras = index, tuple(map(safe_extra,extras))
2565        self.hashCmp = (
2566            self.key, tuple([(op,parsed) for parsed,trans,op,ver in index]),
2567            frozenset(self.extras)
2568        )
2569        self.__hash = hash(self.hashCmp)
2570
2571    def __str__(self):
2572        specs = ','.join([''.join(s) for s in self.specs])
2573        extras = ','.join(self.extras)
2574        if extras: extras = '[%s]' % extras
2575        return '%s%s%s' % (self.project_name, extras, specs)
2576
2577    def __eq__(self,other):
2578        return isinstance(other,Requirement) and self.hashCmp==other.hashCmp
2579
2580    def __contains__(self,item):
2581        if isinstance(item,Distribution):
2582            if item.key != self.key: return False
2583            if self.index: item = item.parsed_version  # only get if we need it
2584        elif isinstance(item,basestring):
2585            item = parse_version(item)
2586        last = None
2587        compare = lambda a, b: (a > b) - (a < b) # -1, 0, 1
2588        for parsed,trans,op,ver in self.index:
2589            action = trans[compare(item,parsed)] # Indexing: 0, 1, -1
2590            if action=='F':
2591                return False
2592            elif action=='T':
2593                return True
2594            elif action=='+':
2595                last = True
2596            elif action=='-' or last is None:   last = False
2597        if last is None: last = True    # no rules encountered
2598        return last
2599
2600    def __hash__(self):
2601        return self.__hash
2602
2603    def __repr__(self): return "Requirement.parse(%r)" % str(self)
2604
2605    #@staticmethod
2606    def parse(s):
2607        reqs = list(parse_requirements(s))
2608        if reqs:
2609            if len(reqs)==1:
2610                return reqs[0]
2611            raise ValueError("Expected only one requirement", s)
2612        raise ValueError("No requirements found", s)
2613
2614    parse = staticmethod(parse)
2615
2616state_machine = {
2617    #       =><
2618    '<': '--T',
2619    '<=': 'T-T',
2620    '>': 'F+F',
2621    '>=': 'T+F',
2622    '==': 'T..',
2623    '!=': 'F++',
2624}
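
# Each state_machine value lists the actions for the three possible outcomes of
# comparing a candidate version with a spec version, indexed as trans[0] for
# "equal", trans[1] for "greater" and trans[-1] for "less" (the "=><" columns
# above).  'T'/'F' make Requirement.__contains__ return immediately, while
# '+'/'-' tentatively accept/reject and defer to later specs.  For example
# (illustrative):
#   '1.3' in Requirement.parse('FooPkg>=1.2,<2.0')   # True
#   '2.0' in Requirement.parse('FooPkg>=1.2,<2.0')   # False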
2625
2626
2627def _get_mro(cls):
2628    """Get an mro for a type or classic class"""
2629    if not isinstance(cls,type):
2630        class cls(cls,object): pass
2631        return cls.__mro__[1:]
2632    return cls.__mro__
2633
2634def _find_adapter(registry, ob):
2635    """Return an adapter factory for `ob` from `registry`"""
2636    for t in _get_mro(getattr(ob, '__class__', type(ob))):
2637        if t in registry:
2638            return registry[t]
2639
2640
2641def ensure_directory(path):
2642    """Ensure that the parent directory of `path` exists"""
2643    dirname = os.path.dirname(path)
2644    if not os.path.isdir(dirname):
2645        os.makedirs(dirname)
2646
2647def split_sections(s):
2648    """Split a string or iterable thereof into (section,content) pairs
2649
2650    Each ``section`` is a stripped version of the section header ("[section]")
2651    and each ``content`` is a list of stripped lines excluding blank lines and
2652    comment-only lines.  If there are any such lines before the first section
2653    header, they're returned in a first ``section`` of ``None``.
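
    For example (illustrative)::

        list(split_sections(['x=1', '[foo]', 'a', 'b', '[bar]', 'c']))
        # -> [(None, ['x=1']), ('foo', ['a', 'b']), ('bar', ['c'])]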
2654    """
2655    section = None
2656    content = []
2657    for line in yield_lines(s):
2658        if line.startswith("["):
2659            if line.endswith("]"):
2660                if section or content:
2661                    yield section, content
2662                section = line[1:-1].strip()
2663                content = []
2664            else:
2665                raise ValueError("Invalid section heading", line)
2666        else:
2667            content.append(line)
2668
2669    # wrap up last segment
2670    yield section, content
2671
2672def _mkstemp(*args,**kw):
2673    from tempfile import mkstemp
2674    old_open = os.open
2675    try:
2676        os.open = os_open   # temporarily bypass sandboxing
2677        return mkstemp(*args,**kw)
2678    finally:
2679        os.open = old_open  # and then put it back
2680
2681
2682# Set up global resource manager (deliberately not state-saved)
2683_manager = ResourceManager()
2684def _initialize(g):
2685    for name in dir(_manager):
2686        if not name.startswith('_'):
2687            g[name] = getattr(_manager, name)
2688_initialize(globals())
2689
2690# Prepare the master working set and make the ``require()`` API available
2691_declare_state('object', working_set = WorkingSet())
2692try:
2693    # Does the main program list any requirements?
2694    from __main__ import __requires__
2695except ImportError:
2696    pass # No: just use the default working set based on sys.path
2697else:
2698    # Yes: ensure the requirements are met, by prefixing sys.path if necessary
2699    try:
2700        working_set.require(__requires__)
2701    except VersionConflict:     # try it without defaults already on sys.path
2702        working_set = WorkingSet([])    # by starting with an empty path
2703        for dist in working_set.resolve(
2704            parse_requirements(__requires__), Environment()
2705        ):
2706            working_set.add(dist)
2707        for entry in sys.path:  # add any missing entries from sys.path
2708            if entry not in working_set.entries:
2709                working_set.add_entry(entry)
2710        sys.path[:] = working_set.entries   # then copy back to sys.path
2711
2712require = working_set.require
2713iter_entry_points = working_set.iter_entry_points
2714add_activation_listener = working_set.subscribe
2715run_script = working_set.run_script
2716run_main = run_script   # backward compatibility
2717# Activate all distributions already on sys.path, and ensure that
2718# all distributions added to the working set in the future (e.g. by
2719# calling ``require()``) will get activated as well.
2720add_activation_listener(lambda dist: dist.activate())
2721working_set.entries=[]
2722list(map(working_set.add_entry,sys.path)) # match order
2723