1"""Package resource API
2--------------------
3
4A resource is a logical file contained within a package, or a logical
5subdirectory thereof.  The package resource API expects resource names
6to have their path parts separated with ``/``, *not* whatever the local
7path separator is.  Do not use os.path operations to manipulate resource
8names being passed into the API.
9
10The package resource API is designed to work with normal filesystem packages,
11.egg files, and unpacked .egg files.  It can also work in a limited way with
12.zip files and with custom PEP 302 loaders that support the ``get_data()``
13method.
14"""

import sys, os, zipimport, time, re, imp, new

try:
    frozenset
except NameError:
    from sets import ImmutableSet as frozenset

from os import utime, rename, unlink    # capture these to bypass sandboxing
from os import open as os_open















def get_supported_platform():
    """Return this platform's maximum compatible version.

    distutils.util.get_platform() normally reports the minimum version
    of Mac OS X that would be required to *use* extensions produced by
    distutils.  But what we want when checking compatibility is to know the
    version of Mac OS X that we are *running*.  To allow usage of packages that
    explicitly require a newer version of Mac OS X, we must also know the
    current version of the OS.

    If this condition occurs for any other platform with a version in its
    platform strings, this function should be extended accordingly.
    """
    plat = get_build_platform()
    m = macosVersionString.match(plat)
    if m is not None and sys.platform == "darwin":
        try:
            plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3))
        except ValueError:
            pass    # not Mac OS X
    return plat




















__all__ = [
    # Basic resource access and distribution/entry point discovery
    'require', 'run_script', 'get_provider', 'get_distribution',
    'load_entry_point', 'get_entry_map', 'get_entry_info', 'iter_entry_points',
    'resource_string', 'resource_stream', 'resource_filename',
    'resource_listdir', 'resource_exists', 'resource_isdir',

    # Environmental control
    'declare_namespace', 'working_set', 'add_activation_listener',
    'find_distributions', 'set_extraction_path', 'cleanup_resources',
    'get_default_cache',

    # Primary implementation classes
    'Environment', 'WorkingSet', 'ResourceManager',
    'Distribution', 'Requirement', 'EntryPoint',

    # Exceptions
    'ResolutionError','VersionConflict','DistributionNotFound','UnknownExtra',
    'ExtractionError',

    # Parsing functions and string utilities
    'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
    'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
    'safe_extra', 'to_filename',

    # filesystem utilities
    'ensure_directory', 'normalize_path',

    # Distribution "precedence" constants
    'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',

    # "Provider" interfaces, implementations, and registration/lookup APIs
    'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
    'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
    'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
    'register_finder', 'register_namespace_handler', 'register_loader_type',
    'fixup_namespace_packages', 'get_importer',

    # Deprecated/backward compatibility only
    'run_main', 'AvailableDistributions',
]
class ResolutionError(Exception):
    """Abstract base for dependency resolution errors"""
    def __repr__(self): return self.__class__.__name__+repr(self.args)

class VersionConflict(ResolutionError):
    """An already-installed version conflicts with the requested version"""

class DistributionNotFound(ResolutionError):
    """A requested distribution was not found"""

class UnknownExtra(ResolutionError):
    """Distribution doesn't have an "extra feature" of the given name"""

_provider_factories = {}
PY_MAJOR = sys.version[:3]
EGG_DIST    = 3
BINARY_DIST = 2
SOURCE_DIST = 1
CHECKOUT_DIST = 0
DEVELOP_DIST = -1

def register_loader_type(loader_type, provider_factory):
    """Register `provider_factory` to make providers for `loader_type`

    `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
    and `provider_factory` is a function that, passed a *module* object,
    returns an ``IResourceProvider`` for that module.
    """
    _provider_factories[loader_type] = provider_factory

def get_provider(moduleOrReq):
    """Return an IResourceProvider for the named module or requirement"""
    if isinstance(moduleOrReq,Requirement):
        return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
    try:
        module = sys.modules[moduleOrReq]
    except KeyError:
        __import__(moduleOrReq)
        module = sys.modules[moduleOrReq]
    loader = getattr(module, '__loader__', None)
    return _find_adapter(_provider_factories, loader)(module)
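
# A minimal sketch of how callers reach a provider: pass either an importable
# module's name or a Requirement.  (``mypkg`` below is a hypothetical package
# used purely for illustration.)
#
#     provider = get_provider('mypkg')                           # by module name
#     provider = get_provider(Requirement.parse('mypkg>=1.0'))   # by requirement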

def _macosx_vers(_cache=[]):
    if not _cache:
        info = os.popen('/usr/bin/sw_vers').read().splitlines()
        for line in info:
            key, value = line.split(None, 1)
            if key == 'ProductVersion:':
                _cache.append(value.strip().split("."))
                break
        else:
            raise ValueError, "Couldn't parse ProductVersion from /usr/bin/sw_vers output"
    return _cache[0]

def _macosx_arch(machine):
    return {'PowerPC':'ppc', 'Power_Macintosh':'ppc'}.get(machine,machine)

def get_build_platform():
    """Return this platform's string for platform-specific distributions

    XXX Currently this is the same as ``distutils.util.get_platform()``, but it
    needs some hacks for Linux and Mac OS X.
    """
    from distutils.util import get_platform
    plat = get_platform()
    if sys.platform == "darwin" and not plat.startswith('macosx-'):
        try:
            version = _macosx_vers()
            machine = os.uname()[4].replace(" ", "_")
            return "macosx-%d.%d-%s" % (int(version[0]), int(version[1]),
                _macosx_arch(machine))
        except ValueError:
            # if someone is running a non-Mac darwin system, this will fall
            # through to the default implementation
            pass
    return plat

macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
get_platform = get_build_platform   # XXX backward compat



def compatible_platforms(provided,required):
    """Can code for the `provided` platform run on the `required` platform?

    Returns true if either platform is ``None``, or the platforms are equal.

    XXX Needs compatibility checks for Linux and other unixy OSes.
    """
    if provided is None or required is None or provided==required:
        return True     # easy case

    # Mac OS X special cases
    reqMac = macosVersionString.match(required)
    if reqMac:
        provMac = macosVersionString.match(provided)

        # is this a Mac package?
        if not provMac:
            # this is backwards compatibility for packages built before
            # setuptools 0.6. All packages built after this point will
            # use the new macosx designation.
            provDarwin = darwinVersionString.match(provided)
            if provDarwin:
                dversion = int(provDarwin.group(1))
                macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
                if dversion == 7 and macosversion >= "10.3" or \
                    dversion == 8 and macosversion >= "10.4":

                    #import warnings
                    #warnings.warn("Mac eggs should be rebuilt to "
                    #    "use the macosx designation instead of darwin.",
                    #    category=DeprecationWarning)
                    return True
            return False    # egg isn't macosx or legacy darwin

        # are they the same major version and machine type?
        if provMac.group(1) != reqMac.group(1) or \
            provMac.group(3) != reqMac.group(3):
            return False



        # is the required OS major update >= the provided one?
        if int(provMac.group(2)) > int(reqMac.group(2)):
            return False

        return True

    # XXX Linux and other platforms' special cases should go here
    return False
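
# For instance, under the rules above (values shown are illustrative only):
#
#     compatible_platforms('macosx-10.3-ppc', 'macosx-10.4-ppc')  # True
#     compatible_platforms('macosx-10.5-ppc', 'macosx-10.4-ppc')  # False (provided is newer)
#     compatible_platforms('win32', 'linux-i686')                 # False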


def run_script(dist_spec, script_name):
    """Locate distribution `dist_spec` and run its `script_name` script"""
    ns = sys._getframe(1).f_globals
    name = ns['__name__']
    ns.clear()
    ns['__name__'] = name
    require(dist_spec)[0].run_script(script_name, ns)

run_main = run_script   # backward compatibility

def get_distribution(dist):
    """Return a current distribution object for a Requirement or string"""
    if isinstance(dist,basestring): dist = Requirement.parse(dist)
    if isinstance(dist,Requirement): dist = get_provider(dist)
    if not isinstance(dist,Distribution):
        raise TypeError("Expected string, Requirement, or Distribution", dist)
    return dist

def load_entry_point(dist, group, name):
    """Return `name` entry point of `group` for `dist` or raise ImportError"""
    return get_distribution(dist).load_entry_point(group, name)

def get_entry_map(dist, group=None):
    """Return the entry point map for `group`, or the full entry map"""
    return get_distribution(dist).get_entry_map(group)

def get_entry_info(dist, group, name):
    """Return the EntryPoint object for `group`+`name`, or ``None``"""
    return get_distribution(dist).get_entry_info(group, name)

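# A hedged sketch of entry-point lookup; the project name 'SomeProject' and
# script name below are illustrative, not guaranteed to exist:
#
#     main = load_entry_point('SomeProject', 'console_scripts', 'some-script')
#     info = get_entry_info('SomeProject', 'console_scripts', 'some-script')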

class IMetadataProvider:

    def has_metadata(name):
        """Does the package's distribution contain the named metadata?"""

    def get_metadata(name):
        """The named metadata resource as a string"""

    def get_metadata_lines(name):
        """Yield named metadata resource as list of non-blank non-comment lines

        Leading and trailing whitespace is stripped from each line, and lines
        with ``#`` as the first non-blank character are omitted."""

    def metadata_isdir(name):
        """Is the named metadata a directory?  (like ``os.path.isdir()``)"""

    def metadata_listdir(name):
        """List of metadata names in the directory (like ``os.listdir()``)"""

    def run_script(script_name, namespace):
        """Execute the named script in the supplied namespace dictionary"""


















class IResourceProvider(IMetadataProvider):
    """An object that provides access to package resources"""

    def get_resource_filename(manager, resource_name):
        """Return a true filesystem path for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_stream(manager, resource_name):
        """Return a readable file-like object for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_string(manager, resource_name):
        """Return a string containing the contents of `resource_name`

        `manager` must be an ``IResourceManager``"""

    def has_resource(resource_name):
        """Does the package contain the named resource?"""

    def resource_isdir(resource_name):
        """Is the named resource a directory?  (like ``os.path.isdir()``)"""

    def resource_listdir(resource_name):
        """List of resource names in the directory (like ``os.listdir()``)"""













class WorkingSet(object):
    """A collection of active distributions on sys.path (or a similar list)"""

    def __init__(self, entries=None):
        """Create working set from list of path entries (default=sys.path)"""
        self.entries = []
        self.entry_keys = {}
        self.by_key = {}
        self.callbacks = []

        if entries is None:
            entries = sys.path

        for entry in entries:
            self.add_entry(entry)


    def add_entry(self, entry):
        """Add a path item to ``.entries``, finding any distributions on it

        ``find_distributions(entry, True)`` is used to find distributions
        corresponding to the path entry, and they are added.  `entry` is
        always appended to ``.entries``, even if it is already present.
        (This is because ``sys.path`` can contain the same value more than
        once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
        equal ``sys.path``.)
        """
        self.entry_keys.setdefault(entry, [])
        self.entries.append(entry)
        for dist in find_distributions(entry, True):
            self.add(dist, entry, False)


    def __contains__(self,dist):
        """True if `dist` is the active distribution for its project"""
        return self.by_key.get(dist.key) == dist




    def find(self, req):
        """Find a distribution matching requirement `req`

        If there is an active distribution for the requested project, this
        returns it as long as it meets the version requirement specified by
        `req`.  But, if there is an active distribution for the project and it
        does *not* meet the `req` requirement, ``VersionConflict`` is raised.
        If there is no active distribution for the requested project, ``None``
        is returned.
        """
        dist = self.by_key.get(req.key)
        if dist is not None and dist not in req:
            raise VersionConflict(dist,req)     # XXX add more info
        else:
            return dist

    def iter_entry_points(self, group, name=None):
        """Yield entry point objects from `group` matching `name`

        If `name` is None, yields all entry points in `group` from all
        distributions in the working set, otherwise only ones matching
        both `group` and `name` are yielded (in distribution order).
        """
        for dist in self:
            entries = dist.get_entry_map(group)
            if name is None:
                for ep in entries.values():
                    yield ep
            elif name in entries:
                yield entries[name]

    def run_script(self, requires, script_name):
        """Locate distribution for `requires` and run `script_name` script"""
        ns = sys._getframe(1).f_globals
        name = ns['__name__']
        ns.clear()
        ns['__name__'] = name
        self.require(requires)[0].run_script(script_name, ns)



    def __iter__(self):
        """Yield distributions for non-duplicate projects in the working set

        The yield order is the order in which the items' path entries were
        added to the working set.
        """
        seen = {}
        for item in self.entries:
            for key in self.entry_keys[item]:
                if key not in seen:
                    seen[key]=1
                    yield self.by_key[key]

    def add(self, dist, entry=None, insert=True):
        """Add `dist` to working set, associated with `entry`

        If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
        On exit from this routine, `entry` is added to the end of the working
        set's ``.entries`` (if it wasn't already present).

        `dist` is only added to the working set if it's for a project that
        doesn't already have a distribution in the set.  If it's added, any
        callbacks registered with the ``subscribe()`` method will be called.
        """
        if insert:
            dist.insert_on(self.entries, entry)

        if entry is None:
            entry = dist.location
        keys = self.entry_keys.setdefault(entry,[])
        keys2 = self.entry_keys.setdefault(dist.location,[])
        if dist.key in self.by_key:
            return      # ignore hidden distros

        self.by_key[dist.key] = dist
        if dist.key not in keys:
            keys.append(dist.key)
        if dist.key not in keys2:
            keys2.append(dist.key)
        self._added_new(dist)

    def resolve(self, requirements, env=None, installer=None):
        """List all distributions needed to (recursively) meet `requirements`

        `requirements` must be a sequence of ``Requirement`` objects.  `env`,
        if supplied, should be an ``Environment`` instance.  If
        not supplied, it defaults to all distributions available within any
        entry or distribution in the working set.  `installer`, if supplied,
        will be invoked with each requirement that cannot be met by an
        already-installed distribution; it should return a ``Distribution`` or
        ``None``.
        """

        requirements = list(requirements)[::-1]  # set up the stack
        processed = {}  # set of processed requirements
        best = {}  # key -> dist
        to_activate = []

        while requirements:
            req = requirements.pop(0)   # process dependencies breadth-first
            if req in processed:
                # Ignore cyclic or redundant dependencies
                continue
            dist = best.get(req.key)
            if dist is None:
                # Find the best distribution and add it to the map
                dist = self.by_key.get(req.key)
                if dist is None:
                    if env is None:
                        env = Environment(self.entries)
                    dist = best[req.key] = env.best_match(req, self, installer)
                    if dist is None:
                        raise DistributionNotFound(req)  # XXX put more info here
                to_activate.append(dist)
            if dist not in req:
                # Oops, the "best" so far conflicts with a dependency
                raise VersionConflict(dist,req) # XXX put more info here
            requirements.extend(dist.requires(req.extras)[::-1])
            processed[req] = True

        return to_activate    # return list of distros to activate

    def find_plugins(self,
        plugin_env, full_env=None, installer=None, fallback=True
    ):
        """Find all activatable distributions in `plugin_env`

        Example usage::

            distributions, errors = working_set.find_plugins(
                Environment(plugin_dirlist)
            )
            map(working_set.add, distributions)  # add plugins+libs to sys.path
            print "Couldn't load", errors        # display errors

        The `plugin_env` should be an ``Environment`` instance that contains
        only distributions that are in the project's "plugin directory" or
        directories.  The `full_env`, if supplied, should be an ``Environment``
        that contains all currently-available distributions.  If `full_env` is
        not supplied, one is created automatically from the ``WorkingSet`` this
        method is called on, which will typically mean that every directory on
        ``sys.path`` will be scanned for distributions.

        `installer` is a standard installer callback as used by the
        ``resolve()`` method. The `fallback` flag indicates whether we should
        attempt to resolve older versions of a plugin if the newest version
        cannot be resolved.

        This method returns a 2-tuple: (`distributions`, `error_info`), where
        `distributions` is a list of the distributions found in `plugin_env`
        that were loadable, along with any other distributions that are needed
        to resolve their dependencies.  `error_info` is a dictionary mapping
        unloadable plugin distributions to an exception instance describing the
        error that occurred. Usually this will be a ``DistributionNotFound`` or
        ``VersionConflict`` instance.
        """

        plugin_projects = list(plugin_env)
        plugin_projects.sort()  # scan project names in alphabetic order

        error_info = {}
        distributions = {}

        if full_env is None:
            env = Environment(self.entries)
            env += plugin_env
        else:
            env = full_env + plugin_env

        shadow_set = self.__class__([])
        map(shadow_set.add, self)   # put all our entries in shadow_set

        for project_name in plugin_projects:

            for dist in plugin_env[project_name]:

                req = [dist.as_requirement()]

                try:
                    resolvees = shadow_set.resolve(req, env, installer)

                except ResolutionError,v:
                    error_info[dist] = v    # save error info
                    if fallback:
                        continue    # try the next older version of project
                    else:
                        break       # give up on this project, keep going

                else:
                    map(shadow_set.add, resolvees)
                    distributions.update(dict.fromkeys(resolvees))

                    # success, no need to try any more versions of this project
                    break

        distributions = list(distributions)
        distributions.sort()

        return distributions, error_info




    def require(self, *requirements):
        """Ensure that distributions matching `requirements` are activated

        `requirements` must be a string or a (possibly-nested) sequence
        thereof, specifying the distributions and versions required.  The
        return value is a sequence of the distributions that needed to be
        activated to fulfill the requirements; all relevant distributions are
        included, even if they were already activated in this working set.
        """

        needed = self.resolve(parse_requirements(requirements))

        for dist in needed:
            self.add(dist)

        return needed


    def subscribe(self, callback):
        """Invoke `callback` for all distributions (including existing ones)"""
        if callback in self.callbacks:
            return
        self.callbacks.append(callback)
        for dist in self:
            callback(dist)


    def _added_new(self, dist):
        for callback in self.callbacks:
            callback(dist)









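# A short, illustrative sketch of typical WorkingSet usage via the module-level
# ``working_set`` instance (created elsewhere in this module); the project name
# and entry point group are hypothetical:
#
#     working_set.require('SomePlugin>=1.0')    # activate a distribution
#     for ep in working_set.iter_entry_points('someapp.plugins'):
#         plugin = ep.load()
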
class Environment(object):
    """Searchable snapshot of distributions on a search path"""

    def __init__(self, search_path=None, platform=get_supported_platform(),
            python=PY_MAJOR):
        """Snapshot distributions available on a search path

        Any distributions found on `search_path` are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items.  If not
        supplied, ``sys.path`` is used.

        `platform` is an optional string specifying the name of the platform
        that platform-specific distributions must be compatible with.  If
        unspecified, it defaults to the current platform.  `python` is an
        optional string naming the desired version of Python (e.g. ``'2.4'``);
        it defaults to the current version.

        You may explicitly set `platform` (and/or `python`) to ``None`` if you
        wish to map *all* distributions, not just those compatible with the
        running platform or Python version.
        """
        self._distmap = {}
        self._cache = {}
        self.platform = platform
        self.python = python
        self.scan(search_path)

    def can_add(self, dist):
        """Is distribution `dist` acceptable for this environment?

        Returns True if the distribution matches the platform and python
        version requirements specified when this environment was created;
        otherwise returns False.
        """
        return (self.python is None or dist.py_version is None
            or dist.py_version==self.python) \
           and compatible_platforms(dist.platform,self.platform)

    def remove(self, dist):
        """Remove `dist` from the environment"""
        self._distmap[dist.key].remove(dist)

    def scan(self, search_path=None):
        """Scan `search_path` for distributions usable in this environment

        Any distributions found are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items.  If not
        supplied, ``sys.path`` is used.  Only distributions conforming to
        the platform/python version defined at initialization are added.
        """
        if search_path is None:
            search_path = sys.path

        for item in search_path:
            for dist in find_distributions(item):
                self.add(dist)

    def __getitem__(self,project_name):
        """Return a newest-to-oldest list of distributions for `project_name`
        """
        try:
            return self._cache[project_name]
        except KeyError:
            project_name = project_name.lower()
            if project_name not in self._distmap:
                return []

        if project_name not in self._cache:
            dists = self._cache[project_name] = self._distmap[project_name]
            _sort_dists(dists)

        return self._cache[project_name]

    def add(self,dist):
        """Add `dist` if we ``can_add()`` it and it isn't already added"""
        if self.can_add(dist) and dist.has_version():
            dists = self._distmap.setdefault(dist.key,[])
            if dist not in dists:
                dists.append(dist)
                if dist.key in self._cache:
                    _sort_dists(self._cache[dist.key])


    def best_match(self, req, working_set, installer=None):
        """Find distribution best matching `req` and usable on `working_set`

        This calls the ``find(req)`` method of the `working_set` to see if a
        suitable distribution is already active.  (This may raise
        ``VersionConflict`` if an unsuitable version of the project is already
        active in the specified `working_set`.)  If a suitable distribution
        isn't active, this method returns the newest distribution in the
        environment that meets the ``Requirement`` in `req`.  If no suitable
        distribution is found, and `installer` is supplied, then the result of
        calling the environment's ``obtain(req, installer)`` method will be
        returned.
        """
        dist = working_set.find(req)
        if dist is not None:
            return dist
        for dist in self[req.key]:
            if dist in req:
                return dist
        return self.obtain(req, installer) # try and download/install

    def obtain(self, requirement, installer=None):
        """Obtain a distribution matching `requirement` (e.g. via download)

        In the base ``Environment`` class, this routine just returns
        ``installer(requirement)``, unless `installer` is None, in which case
        None is returned instead.  This method is a hook that allows subclasses
        to attempt other ways of obtaining a distribution before falling back
        to the `installer` argument."""
        if installer is not None:
            return installer(requirement)

    def __iter__(self):
        """Yield the unique project names of the available distributions"""
        for key in self._distmap.keys():
            if self[key]: yield key




    def __iadd__(self, other):
        """In-place addition of a distribution or environment"""
        if isinstance(other,Distribution):
            self.add(other)
        elif isinstance(other,Environment):
            for project in other:
                for dist in other[project]:
                    self.add(dist)
        else:
            raise TypeError("Can't add %r to environment" % (other,))
        return self

    def __add__(self, other):
        """Add an environment or distribution to an environment"""
        new = self.__class__([], platform=None, python=None)
        for env in self, other:
            new += env
        return new


AvailableDistributions = Environment    # XXX backward compatibility
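
# A hedged sketch of combining an Environment with a WorkingSet; the directory
# name is purely illustrative:
#
#     env = Environment(['/path/to/plugins'] + sys.path)
#     dist = env.best_match(Requirement.parse('SomeProject'), working_set)
#     if dist is not None:
#         working_set.add(dist)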


class ExtractionError(RuntimeError):
    """An error occurred extracting a resource

    The following attributes are available from instances of this exception:

    manager
        The resource manager that raised this exception

    cache_path
        The base directory for resource extraction

    original_error
        The exception instance that caused extraction to fail
    """




class ResourceManager:
    """Manage resource extraction and packages"""
    extraction_path = None

    def __init__(self):
        self.cached_files = {}

    def resource_exists(self, package_or_requirement, resource_name):
        """Does the named resource exist?"""
        return get_provider(package_or_requirement).has_resource(resource_name)

    def resource_isdir(self, package_or_requirement, resource_name):
        """Is the named resource an existing directory?"""
        return get_provider(package_or_requirement).resource_isdir(
            resource_name
        )

    def resource_filename(self, package_or_requirement, resource_name):
        """Return a true filesystem path for specified resource"""
        return get_provider(package_or_requirement).get_resource_filename(
            self, resource_name
        )

    def resource_stream(self, package_or_requirement, resource_name):
        """Return a readable file-like object for specified resource"""
        return get_provider(package_or_requirement).get_resource_stream(
            self, resource_name
        )

    def resource_string(self, package_or_requirement, resource_name):
        """Return specified resource as a string"""
        return get_provider(package_or_requirement).get_resource_string(
            self, resource_name
        )

    def resource_listdir(self, package_or_requirement, resource_name):
        """List the contents of the named resource directory"""
        return get_provider(package_or_requirement).resource_listdir(
            resource_name
        )

    def extraction_error(self):
        """Give an error message for problems extracting file(s)"""

        old_exc = sys.exc_info()[1]
        cache_path = self.extraction_path or get_default_cache()

        err = ExtractionError("""Can't extract file(s) to egg cache

The following error occurred while trying to extract file(s) to the Python egg
cache:

  %s

The Python egg cache directory is currently set to:

  %s

Perhaps your account does not have write access to this directory?  You can
change the cache directory by setting the PYTHON_EGG_CACHE environment
variable to point to an accessible directory.
"""         % (old_exc, cache_path)
        )
        err.manager        = self
        err.cache_path     = cache_path
        err.original_error = old_exc
        raise err







    def get_cache_path(self, archive_name, names=()):
        """Return absolute location in cache for `archive_name` and `names`

        The parent directory of the resulting path will be created if it does
        not already exist.  `archive_name` should be the base filename of the
        enclosing egg (which may not be the name of the enclosing zipfile!),
        including its ".egg" extension.  `names`, if provided, should be a
        sequence of path name parts "under" the egg's extraction location.

        This method should only be called by resource providers that need to
        obtain an extraction location, and only for names they intend to
        extract, as it tracks the generated names for possible cleanup later.
        """
        extract_path = self.extraction_path or get_default_cache()
        target_path = os.path.join(extract_path, archive_name+'-tmp', *names)
        try:
            ensure_directory(target_path)
        except:
            self.extraction_error()

        self.cached_files[target_path] = 1
        return target_path







    def postprocess(self, tempname, filename):
        """Perform any platform-specific postprocessing of `tempname`

        This is where Mac header rewrites should be done; other platforms don't
        have anything special they should do.

        Resource providers should call this method ONLY after successfully
        extracting a compressed resource.  They must NOT call it on resources
        that are already in the filesystem.

        `tempname` is the current (temporary) name of the file, and `filename`
        is the name it will be renamed to by the caller after this routine
        returns.
        """

        if os.name == 'posix':
            # Make the resource executable
            mode = ((os.stat(tempname).st_mode) | 0555) & 07777
            os.chmod(tempname, mode)







    def set_extraction_path(self, path):
        """Set the base path where resources will be extracted to, if needed.

        If you do not call this routine before any extractions take place, the
        path defaults to the return value of ``get_default_cache()``.  (Which
        is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
        platform-specific fallbacks.  See that routine's documentation for more
        details.)

        Resources are extracted to subdirectories of this path based upon
        information given by the ``IResourceProvider``.  You may set this to a
        temporary directory, but then you must call ``cleanup_resources()`` to
        delete the extracted files when done.  There is no guarantee that
        ``cleanup_resources()`` will be able to remove all extracted files.

        (Note: you may not change the extraction path for a given resource
        manager once resources have been extracted, unless you first call
        ``cleanup_resources()``.)
        """
        if self.cached_files:
            raise ValueError(
                "Can't change extraction path, files already extracted"
            )

        self.extraction_path = path

    def cleanup_resources(self, force=False):
        """
        Delete all extracted resource files and directories, returning a list
        of the file and directory names that could not be successfully removed.
        This function does not have any concurrency protection, so it should
        generally only be called when the extraction path is a temporary
        directory exclusive to a single process.  This method is not
        automatically called; you must call it explicitly or register it as an
        ``atexit`` function if you wish to ensure cleanup of a temporary
        directory used for extractions.
        """
        # XXX


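# A hedged sketch of redirecting extraction to a temporary directory via the
# module-level API (bound to a shared ResourceManager elsewhere in this
# module); remember to clean up afterwards:
#
#     import atexit, tempfile
#     set_extraction_path(tempfile.mkdtemp())
#     atexit.register(cleanup_resources)
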
def get_default_cache():
    """Determine the default cache location

    This returns the ``PYTHON_EGG_CACHE`` environment variable, if set.
    Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of the
    "Application Data" directory.  On all other systems, it's "~/.python-eggs".
    """
    try:
        return os.environ['PYTHON_EGG_CACHE']
    except KeyError:
        pass

    if os.name!='nt':
        return os.path.expanduser('~/.python-eggs')

    app_data = 'Application Data'   # XXX this may be locale-specific!
    app_homes = [
        (('APPDATA',), None),       # best option, should be locale-safe
        (('USERPROFILE',), app_data),
        (('HOMEDRIVE','HOMEPATH'), app_data),
        (('HOMEPATH',), app_data),
        (('HOME',), None),
        (('WINDIR',), app_data),    # 95/98/ME
    ]

    for keys, subdir in app_homes:
        dirname = ''
        for key in keys:
            if key in os.environ:
                dirname = os.path.join(dirname, os.environ[key])
            else:
                break
        else:
            if subdir:
                dirname = os.path.join(dirname,subdir)
            return os.path.join(dirname, 'Python-Eggs')
    else:
        raise RuntimeError(
            "Please set the PYTHON_EGG_CACHE environment variable"
        )

def safe_name(name):
    """Convert an arbitrary string to a standard distribution name

    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
    """
    return re.sub('[^A-Za-z0-9.]+', '-', name)


def safe_version(version):
    """Convert an arbitrary string to a standard version string

    Spaces become dots, and all other non-alphanumeric characters become
    dashes, with runs of multiple dashes condensed to a single dash.
    """
    version = version.replace(' ','.')
    return re.sub('[^A-Za-z0-9.]+', '-', version)


def safe_extra(extra):
    """Convert an arbitrary string to a standard 'extra' name

    Any runs of non-alphanumeric/. characters are replaced with a single '_',
    and the result is always lowercased.
    """
    return re.sub('[^A-Za-z0-9.]+', '_', extra).lower()


def to_filename(name):
    """Convert a project or version name to its filename-escaped form

    Any '-' characters are currently replaced with '_'.
    """
    return name.replace('-','_')
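
# Illustrative input/output pairs for the helpers above (values derived from
# the regular expressions and replacements they use):
#
#     safe_name('The $$$ Tree')        -> 'The-Tree'
#     safe_version('1.2 beta 3')       -> '1.2.beta.3'
#     to_filename('some-project-1.2')  -> 'some_project_1.2'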








class NullProvider:
    """Try to implement resources and metadata for arbitrary PEP 302 loaders"""

    egg_name = None
    egg_info = None
    loader = None

    def __init__(self, module):
        self.loader = getattr(module, '__loader__', None)
        self.module_path = os.path.dirname(getattr(module, '__file__', ''))

    def get_resource_filename(self, manager, resource_name):
        return self._fn(self.module_path, resource_name)

    def get_resource_stream(self, manager, resource_name):
        return StringIO(self.get_resource_string(manager, resource_name))

    def get_resource_string(self, manager, resource_name):
        return self._get(self._fn(self.module_path, resource_name))

    def has_resource(self, resource_name):
        return self._has(self._fn(self.module_path, resource_name))

    def has_metadata(self, name):
        return self.egg_info and self._has(self._fn(self.egg_info,name))

    def get_metadata(self, name):
        if not self.egg_info:
            return ""
        return self._get(self._fn(self.egg_info,name))

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))

    def resource_isdir(self,resource_name):
        return self._isdir(self._fn(self.module_path, resource_name))

    def metadata_isdir(self,name):
        return self.egg_info and self._isdir(self._fn(self.egg_info,name))

    def resource_listdir(self,resource_name):
        return self._listdir(self._fn(self.module_path,resource_name))

    def metadata_listdir(self,name):
        if self.egg_info:
            return self._listdir(self._fn(self.egg_info,name))
        return []

    def run_script(self,script_name,namespace):
        script = 'scripts/'+script_name
        if not self.has_metadata(script):
            raise ResolutionError("No script named %r" % script_name)
        script_text = self.get_metadata(script).replace('\r\n','\n')
        script_text = script_text.replace('\r','\n')
        script_filename = self._fn(self.egg_info,script)
        namespace['__file__'] = script_filename
        if os.path.exists(script_filename):
            execfile(script_filename, namespace, namespace)
        else:
            from linecache import cache
            cache[script_filename] = (
                len(script_text), 0, script_text.split('\n'), script_filename
            )
            script_code = compile(script_text,script_filename,'exec')
            exec script_code in namespace, namespace

    def _has(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _isdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _listdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _fn(self, base, resource_name):
        return os.path.join(base, *resource_name.split('/'))

    def _get(self, path):
        if hasattr(self.loader, 'get_data'):
            return self.loader.get_data(path)
        raise NotImplementedError(
            "Can't perform this operation for loaders without 'get_data()'"
        )

register_loader_type(object, NullProvider)
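
# A minimal sketch of hooking a custom PEP 302 loader into the provider
# registry; ``MyLoader``, ``MyProvider``, and the loader's ``exists()`` method
# are hypothetical names used only for illustration:
#
#     class MyProvider(NullProvider):
#         def _has(self, path):
#             return self.loader.exists(path)   # assumed loader capability
#
#     register_loader_type(MyLoader, MyProvider)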


class EggProvider(NullProvider):
    """Provider based on a virtual filesystem"""

    def __init__(self,module):
        NullProvider.__init__(self,module)
        self._setup_prefix()

    def _setup_prefix(self):
        # we assume here that our metadata may be nested inside a "basket"
        # of multiple eggs; that's why we use module_path instead of .archive
        path = self.module_path
        old = None
        while path!=old:
            if path.lower().endswith('.egg'):
                self.egg_name = os.path.basename(path)
                self.egg_info = os.path.join(path, 'EGG-INFO')
                self.egg_root = path
                break
            old = path
            path, base = os.path.split(path)







class DefaultProvider(EggProvider):
    """Provides access to package resources in the filesystem"""

    def _has(self, path):
        return os.path.exists(path)

    def _isdir(self,path):
        return os.path.isdir(path)

    def _listdir(self,path):
        return os.listdir(path)

    def get_resource_stream(self, manager, resource_name):
        return open(self._fn(self.module_path, resource_name), 'rb')

    def _get(self, path):
        stream = open(path, 'rb')
        try:
            return stream.read()
        finally:
            stream.close()

register_loader_type(type(None), DefaultProvider)


class EmptyProvider(NullProvider):
    """Provider that returns nothing for all requests"""

    _isdir = _has = lambda self,path: False
    _get          = lambda self,path: ''
    _listdir      = lambda self,path: []
    module_path   = None

    def __init__(self):
        pass

empty_provider = EmptyProvider()




class ZipProvider(EggProvider):
    """Resource support for zips and eggs"""

    eagers = None

    def __init__(self, module):
        EggProvider.__init__(self,module)
        self.zipinfo = zipimport._zip_directory_cache[self.loader.archive]
        self.zip_pre = self.loader.archive+os.sep

    def _zipinfo_name(self, fspath):
        # Convert a virtual filename (full path to file) into a zipfile subpath
        # usable with the zipimport directory cache for our target archive
        if fspath.startswith(self.zip_pre):
            return fspath[len(self.zip_pre):]
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath,self.zip_pre)
        )

    def _parts(self,zip_path):
        # Convert a zipfile subpath into an egg-relative path part list
        fspath = self.zip_pre+zip_path  # pseudo-fs path
        if fspath.startswith(self.egg_root+os.sep):
            return fspath[len(self.egg_root)+1:].split(os.sep)
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath,self.egg_root)
        )

    def get_resource_filename(self, manager, resource_name):
        if not self.egg_name:
            raise NotImplementedError(
                "resource_filename() only supported for .egg, not .zip"
            )
        # no need to lock for extraction, since we use temp names
        zip_path = self._resource_to_zip(resource_name)
        eagers = self._get_eager_resources()
        if '/'.join(self._parts(zip_path)) in eagers:
            for name in eagers:
                self._extract_resource(manager, self._eager_to_zip(name))
        return self._extract_resource(manager, zip_path)

    def _extract_resource(self, manager, zip_path):

        if zip_path in self._index():
            for name in self._index()[zip_path]:
                last = self._extract_resource(
                    manager, os.path.join(zip_path, name)
                )
            return os.path.dirname(last)  # return the extracted directory name

        zip_stat = self.zipinfo[zip_path]
        t,d,size = zip_stat[5], zip_stat[6], zip_stat[3]
        date_time = (
            (d>>9)+1980, (d>>5)&0xF, d&0x1F,                      # ymd
            (t&0xFFFF)>>11, (t>>5)&0x3F, (t&0x1F) * 2, 0, 0, -1   # hms, etc.
        )
        timestamp = time.mktime(date_time)

        try:
            real_path = manager.get_cache_path(
                self.egg_name, self._parts(zip_path)
            )

            if os.path.isfile(real_path):
                stat = os.stat(real_path)
                if stat.st_size==size and stat.st_mtime==timestamp:
                    # size and stamp match, don't bother extracting
                    return real_path

            outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path))
            os.write(outf, self.loader.get_data(zip_path))
            os.close(outf)
            utime(tmpnam, (timestamp,timestamp))
            manager.postprocess(tmpnam, real_path)

            try:
                rename(tmpnam, real_path)

            except os.error:
                if os.path.isfile(real_path):
                    stat = os.stat(real_path)

                    if stat.st_size==size and stat.st_mtime==timestamp:
                        # size and stamp match, somebody did it just ahead of
                        # us, so we're done
                        return real_path
                    elif os.name=='nt':     # Windows, del old file and retry
                        unlink(real_path)
                        rename(tmpnam, real_path)
                        return real_path
                raise

        except os.error:
            manager.extraction_error()  # report a user-friendly error

        return real_path

    def _get_eager_resources(self):
        if self.eagers is None:
            eagers = []
            for name in ('native_libs.txt', 'eager_resources.txt'):
                if self.has_metadata(name):
                    eagers.extend(self.get_metadata_lines(name))
            self.eagers = eagers
        return self.eagers

    def _index(self):
        try:
            return self._dirindex
        except AttributeError:
            ind = {}
            for path in self.zipinfo:
                parts = path.split(os.sep)
                while parts:
                    parent = os.sep.join(parts[:-1])
                    if parent in ind:
                        ind[parent].append(parts[-1])
                        break
                    else:
                        ind[parent] = [parts.pop()]
            self._dirindex = ind
            return ind

    def _has(self, fspath):
        zip_path = self._zipinfo_name(fspath)
        return zip_path in self.zipinfo or zip_path in self._index()

    def _isdir(self,fspath):
        return self._zipinfo_name(fspath) in self._index()

    def _listdir(self,fspath):
        return list(self._index().get(self._zipinfo_name(fspath), ()))

    def _eager_to_zip(self,resource_name):
        return self._zipinfo_name(self._fn(self.egg_root,resource_name))

    def _resource_to_zip(self,resource_name):
        return self._zipinfo_name(self._fn(self.module_path,resource_name))

register_loader_type(zipimport.zipimporter, ZipProvider)
1412
1413
1414
1415
1416
1417
1418
1419
1420
1421
1422
1423
1424
1425
1426
1427
1428
1429
1430
1431
1432
1433
1434
1435
1436class FileMetadata(EmptyProvider):
1437    """Metadata handler for standalone PKG-INFO files
1438
1439    Usage::
1440
1441        metadata = FileMetadata("/path/to/PKG-INFO")
1442
1443    This provider rejects all data and metadata requests except for PKG-INFO,
1444    which is treated as existing, and will be the contents of the file at
1445    the provided location.
1446    """
1447
1448    def __init__(self,path):
1449        self.path = path
1450
1451    def has_metadata(self,name):
1452        return name=='PKG-INFO'
1453
1454    def get_metadata(self,name):
1455        if name=='PKG-INFO':
1456            return open(self.path,'rU').read()
1457        raise KeyError("No metadata except PKG-INFO is available")
1458
1459    def get_metadata_lines(self,name):
1460        return yield_lines(self.get_metadata(name))
1461
1462
1463
1464
1465
1466
1467
1468
1469
1470
1471
1472
1473
1474
1475
1476
1477class PathMetadata(DefaultProvider):
1478    """Metadata provider for egg directories
1479
1480    Usage::
1481
1482        # Development eggs:
1483
1484        egg_info = "/path/to/PackageName.egg-info"
1485        base_dir = os.path.dirname(egg_info)
1486        metadata = PathMetadata(base_dir, egg_info)
1487        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
1488        dist = Distribution(basedir,project_name=dist_name,metadata=metadata)
1489
1490        # Unpacked egg directories:
1491
1492        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
1493        metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
1494        dist = Distribution.from_filename(egg_path, metadata=metadata)
1495    """
1496
1497    def __init__(self, path, egg_info):
1498        self.module_path = path
1499        self.egg_info = egg_info
1500
1501
1502class EggMetadata(ZipProvider):
1503    """Metadata provider for .egg files"""
1504
1505    def __init__(self, importer):
1506        """Create a metadata provider from a zipimporter"""
1507
1508        self.zipinfo = zipimport._zip_directory_cache[importer.archive]
1509        self.zip_pre = importer.archive+os.sep
1510        self.loader = importer
1511        if importer.prefix:
1512            self.module_path = os.path.join(importer.archive, importer.prefix)
1513        else:
1514            self.module_path = importer.archive
1515        self._setup_prefix()
1516
1517
1518class ImpWrapper:
1519    """PEP 302 Importer that wraps Python's "normal" import algorithm"""
1520
1521    def __init__(self, path=None):
1522        self.path = path
1523
1524    def find_module(self, fullname, path=None):
1525        subname = fullname.split(".")[-1]
1526        if subname != fullname and self.path is None:
1527            return None
1528        if self.path is None:
1529            path = None
1530        else:
1531            path = [self.path]
1532        try:
1533            file, filename, etc = imp.find_module(subname, path)
1534        except ImportError:
1535            return None
1536        return ImpLoader(file, filename, etc)
1537
1538
1539class ImpLoader:
1540    """PEP 302 Loader that wraps Python's "normal" import algorithm"""
1541
1542    def __init__(self, file, filename, etc):
1543        self.file = file
1544        self.filename = filename
1545        self.etc = etc
1546
1547    def load_module(self, fullname):
1548        try:
1549            mod = imp.load_module(fullname, self.file, self.filename, self.etc)
1550        finally:
1551            if self.file: self.file.close()
1552        # Note: we don't set __loader__ because we want the module to look
1553        # normal; i.e. this is just a wrapper for standard import machinery
1554        return mod
1555
1556
1557
1558
1559def get_importer(path_item):
1560    """Retrieve a PEP 302 "importer" for the given path item
1561
1562    If there is no importer, this returns a wrapper around the builtin import
1563    machinery.  The returned importer is only cached if it was created by a
1564    path hook.
1565    """
1566    try:
1567        importer = sys.path_importer_cache[path_item]
1568    except KeyError:
1569        for hook in sys.path_hooks:
1570            try:
1571                importer = hook(path_item)
1572            except ImportError:
1573                pass
1574            else:
1575                break
1576        else:
1577            importer = None
1578
1579    sys.path_importer_cache.setdefault(path_item,importer)
1580    if importer is None:
1581        try:
1582            importer = ImpWrapper(path_item)
1583        except ImportError:
1584            pass
1585    return importer
1586
1587try:
1588    from pkgutil import get_importer, ImpImporter
1589except ImportError:
1590    pass    # Python 2.3 or 2.4, use our own implementation
1591else:
1592    ImpWrapper = ImpImporter    # Python 2.5, use pkgutil's implementation
1593    del ImpLoader, ImpImporter
1594
1595
1596
1597
1598
1599
1600_distribution_finders = {}
1601
1602def register_finder(importer_type, distribution_finder):
1603    """Register `distribution_finder` to find distributions in sys.path items
1604
1605    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
1606    handler), and `distribution_finder` is a callable that, passed a path
1607    item and the importer instance, yields ``Distribution`` instances found on
1608    that path item.  See ``pkg_resources.find_on_path`` for an example."""
1609    _distribution_finders[importer_type] = distribution_finder
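
# A minimal finder sketch (illustrative only; ``MyImporter`` and its
# ``has_pkg_info()`` method are hypothetical)::
#
#     def find_my_distributions(importer, path_item, only=False):
#         # yield zero or more Distribution objects found on `path_item`
#         if importer.has_pkg_info():
#             yield Distribution.from_filename(path_item)
#
#     register_finder(MyImporter, find_my_distributions)
#
# The real finders registered below are ``find_in_zip`` (zipped eggs) and
# ``find_on_path`` (ordinary directories).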
1610
1611
1612def find_distributions(path_item, only=False):
1613    """Yield distributions accessible via `path_item`"""
1614    importer = get_importer(path_item)
1615    finder = _find_adapter(_distribution_finders, importer)
1616    return finder(importer, path_item, only)
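
# Typical use (illustrative; the directory shown is hypothetical)::
#
#     for dist in find_distributions('/usr/lib/python2.4/site-packages'):
#         print dist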
1617
1618def find_in_zip(importer, path_item, only=False):
1619    metadata = EggMetadata(importer)
1620    if metadata.has_metadata('PKG-INFO'):
1621        yield Distribution.from_filename(path_item, metadata=metadata)
1622    if only:
1623        return  # don't yield nested distros
1624    for subitem in metadata.resource_listdir('/'):
1625        if subitem.endswith('.egg'):
1626            subpath = os.path.join(path_item, subitem)
1627            for dist in find_in_zip(zipimport.zipimporter(subpath), subpath):
1628                yield dist
1629
1630register_finder(zipimport.zipimporter, find_in_zip)
1631
1632def StringIO(*args, **kw):
1633    """Thunk to load the real StringIO on demand"""
1634    global StringIO
1635    try:
1636        from cStringIO import StringIO
1637    except ImportError:
1638        from StringIO import StringIO
1639    return StringIO(*args,**kw)
1640
1641def find_nothing(importer, path_item, only=False):
1642    return ()
1643register_finder(object,find_nothing)
1644
1645def find_on_path(importer, path_item, only=False):
1646    """Yield distributions accessible on a sys.path directory"""
1647    path_item = _normalize_cached(path_item)
1648
1649    if os.path.isdir(path_item):
1650        if path_item.lower().endswith('.egg'):
1651            # unpacked egg
1652            yield Distribution.from_filename(
1653                path_item, metadata=PathMetadata(
1654                    path_item, os.path.join(path_item,'EGG-INFO')
1655                )
1656            )
1657        else:
1658            # scan for .egg and .egg-info in directory
1659            for entry in os.listdir(path_item):
1660                lower = entry.lower()
1661                if lower.endswith('.egg-info'):
1662                    fullpath = os.path.join(path_item, entry)
1663                    if os.path.isdir(fullpath):
1664                        # egg-info directory, allow getting metadata
1665                        metadata = PathMetadata(path_item, fullpath)
1666                    else:
1667                        metadata = FileMetadata(fullpath)
1668                    yield Distribution.from_location(
1669                        path_item,entry,metadata,precedence=DEVELOP_DIST
1670                    )
1671                elif not only and lower.endswith('.egg'):
1672                    for dist in find_distributions(os.path.join(path_item, entry)):
1673                        yield dist
1674                elif not only and lower.endswith('.egg-link'):
1675                    for line in file(os.path.join(path_item, entry)):
1676                        if not line.strip(): continue
1677                        for item in find_distributions(os.path.join(path_item,line.rstrip())):
1678                            yield item
1679                        break
1680register_finder(ImpWrapper,find_on_path)
1681
1682_namespace_handlers = {}
1683_namespace_packages = {}
1684
1685def register_namespace_handler(importer_type, namespace_handler):
1686    """Register `namespace_handler` to declare namespace packages
1687
1688    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
1689    handler), and `namespace_handler` is a callable like this::
1690
1691        def namespace_handler(importer,path_entry,moduleName,module):
1692            # return a path_entry to use for child packages
1693
1694    Namespace handlers are only called if the importer object has already
1695    agreed that it can handle the relevant path item, and they should only
1696    return a subpath if the module __path__ does not already contain an
1697    equivalent subpath.  For an example namespace handler, see
1698    ``pkg_resources.file_ns_handler``.
1699    """
1700    _namespace_handlers[importer_type] = namespace_handler
1701
1702def _handle_ns(packageName, path_item):
    """Ensure that the named package includes a subpath of path_item (if needed)"""
1704    importer = get_importer(path_item)
1705    if importer is None:
1706        return None
1707    loader = importer.find_module(packageName)
1708    if loader is None:
1709        return None
1710    module = sys.modules.get(packageName)
1711    if module is None:
1712        module = sys.modules[packageName] = new.module(packageName)
1713        module.__path__ = []; _set_parent_ns(packageName)
1714    elif not hasattr(module,'__path__'):
1715        raise TypeError("Not a package:", packageName)
1716    handler = _find_adapter(_namespace_handlers, importer)
1717    subpath = handler(importer,path_item,packageName,module)
1718    if subpath is not None:
1719        path = module.__path__; path.append(subpath)
1720        loader.load_module(packageName); module.__path__ = path
1721    return subpath
1722
1723def declare_namespace(packageName):
1724    """Declare that package 'packageName' is a namespace package"""
1725
1726    imp.acquire_lock()
1727    try:
1728        if packageName in _namespace_packages:
1729            return
1730
1731        path, parent = sys.path, None
1732        if '.' in packageName:
1733            parent = '.'.join(packageName.split('.')[:-1])
1734            declare_namespace(parent)
1735            __import__(parent)
1736            try:
1737                path = sys.modules[parent].__path__
1738            except AttributeError:
1739                raise TypeError("Not a package:", parent)
1740
1741        # Track what packages are namespaces, so when new path items are added,
1742        # they can be updated
1743        _namespace_packages.setdefault(parent,[]).append(packageName)
1744        _namespace_packages.setdefault(packageName,[])
1745
1746        for path_item in path:
1747            # Ensure all the parent's path items are reflected in the child,
1748            # if they apply
1749            _handle_ns(packageName, path_item)
1750
1751    finally:
1752        imp.release_lock()
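
# Typical use, from a namespace package's __init__.py (``mycompany`` is a
# hypothetical package name)::
#
#     # mycompany/__init__.py
#     import pkg_resources
#     pkg_resources.declare_namespace(__name__)
#
# Each distribution contributing subpackages to ``mycompany`` should ship an
# __init__.py that contains only this declaration, so their contents can be
# merged onto a single logical package.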
1753
1754def fixup_namespace_packages(path_item, parent=None):
1755    """Ensure that previously-declared namespace packages include path_item"""
1756    imp.acquire_lock()
1757    try:
1758        for package in _namespace_packages.get(parent,()):
1759            subpath = _handle_ns(package, path_item)
1760            if subpath: fixup_namespace_packages(subpath,package)
1761    finally:
1762        imp.release_lock()
1763
1764def file_ns_handler(importer, path_item, packageName, module):
1765    """Compute an ns-package subpath for a filesystem or zipfile importer"""
1766
1767    subpath = os.path.join(path_item, packageName.split('.')[-1])
1768    normalized = _normalize_cached(subpath)
1769    for item in module.__path__:
1770        if _normalize_cached(item)==normalized:
1771            break
1772    else:
1773        # Only return the path if it's not already there
1774        return subpath
1775
1776register_namespace_handler(ImpWrapper,file_ns_handler)
1777register_namespace_handler(zipimport.zipimporter,file_ns_handler)
1778
1779
1780def null_ns_handler(importer, path_item, packageName, module):
1781    return None
1782
1783register_namespace_handler(object,null_ns_handler)
1784
1785
1786def normalize_path(filename):
1787    """Normalize a file/dir name for comparison purposes"""
1788    return os.path.normcase(os.path.realpath(filename))
1789
1790def _normalize_cached(filename,_cache={}):
1791    try:
1792        return _cache[filename]
1793    except KeyError:
1794        _cache[filename] = result = normalize_path(filename)
1795        return result
1796
1797def _set_parent_ns(packageName):
1798    parts = packageName.split('.')
1799    name = parts.pop()
1800    if parts:
1801        parent = '.'.join(parts)
1802        setattr(sys.modules[parent], name, sys.modules[packageName])
1803
1804
1805def yield_lines(strs):
1806    """Yield non-empty/non-comment lines of a ``basestring`` or sequence"""
1807    if isinstance(strs,basestring):
1808        for s in strs.splitlines():
1809            s = s.strip()
1810            if s and not s.startswith('#'):     # skip blank lines/comments
1811                yield s
1812    else:
1813        for ss in strs:
1814            for s in yield_lines(ss):
1815                yield s
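
# Examples (illustrative; each comparison is True)::
#
#     list(yield_lines("foo\n# a comment\n\nbar"))  ==  ['foo', 'bar']
#     list(yield_lines(["foo\nbar", "baz"]))        ==  ['foo', 'bar', 'baz']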
1816
1817LINE_END = re.compile(r"\s*(#.*)?$").match         # whitespace and comment
1818CONTINUE = re.compile(r"\s*\\\s*(#.*)?$").match    # line continuation
1819DISTRO   = re.compile(r"\s*((\w|[-.])+)").match    # Distribution or extra
1820VERSION  = re.compile(r"\s*(<=?|>=?|==|!=)\s*((\w|[-.])+)").match  # ver. info
1821COMMA    = re.compile(r"\s*,").match               # comma between items
1822OBRACKET = re.compile(r"\s*\[").match
1823CBRACKET = re.compile(r"\s*\]").match
1824MODULE   = re.compile(r"\w+(\.\w+)*$").match
1825EGG_NAME = re.compile(
1826    r"(?P<name>[^-]+)"
1827    r"( -(?P<ver>[^-]+) (-py(?P<pyver>[^-]+) (-(?P<plat>.+))? )? )?",
1828    re.VERBOSE | re.IGNORECASE
1829).match
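
# EGG_NAME parses the dash-separated parts of an egg's base filename (with the
# extension already removed).  For example (illustrative)::
#
#     m = EGG_NAME('FooBar-1.2-py2.4-win32')
#     m.group('name','ver','pyver','plat')  ==  ('FooBar', '1.2', '2.4', 'win32')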
1830
1831component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
1832replace = {'pre':'c', 'preview':'c','-':'final-','rc':'c','dev':'@'}.get
1833
1834def _parse_version_parts(s):
1835    for part in component_re.split(s):
1836        part = replace(part,part)
1837        if not part or part=='.':
1838            continue
1839        if part[:1] in '0123456789':
1840            yield part.zfill(8)    # pad for numeric comparison
1841        else:
1842            yield '*'+part
1843
1844    yield '*final'  # ensure that alpha/beta/candidate are before final
1845
1846def parse_version(s):
1847    """Convert a version string to a chronologically-sortable key
1848
1849    This is a rough cross between distutils' StrictVersion and LooseVersion;
1850    if you give it versions that would work with StrictVersion, then it behaves
1851    the same; otherwise it acts like a slightly-smarter LooseVersion. It is
1852    *possible* to create pathological version coding schemes that will fool
1853    this parser, but they should be very rare in practice.
1854
1855    The returned value will be a tuple of strings.  Numeric portions of the
1856    version are padded to 8 digits so they will compare numerically, but
1857    without relying on how numbers compare relative to strings.  Dots are
1858    dropped, but dashes are retained.  Trailing zeros between alpha segments
1859    or dashes are suppressed, so that e.g. "2.4.0" is considered the same as
1860    "2.4". Alphanumeric parts are lower-cased.
1861
    The algorithm assumes that strings like "-" and any alpha string that
    alphabetically follows "final" represent a "patch level".  So, "2.4-1"
    is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is
    considered newer than "2.4-1", which in turn is newer than "2.4".
1866
1867    Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that
1868    come before "final" alphabetically) are assumed to be pre-release versions,
1869    so that the version "2.4" is considered newer than "2.4a1".
1870
1871    Finally, to handle miscellaneous cases, the strings "pre", "preview", and
1872    "rc" are treated as if they were "c", i.e. as though they were release
1873    candidates, and therefore are not as new as a version string that does not
1874    contain them.
1875    """
1876    parts = []
1877    for part in _parse_version_parts(s.lower()):
1878        if part.startswith('*'):
1879            if part<'*final':   # remove '-' before a prerelease tag
1880                while parts and parts[-1]=='*final-': parts.pop()
1881            # remove trailing zeros from each series of numeric parts
1882            while parts and parts[-1]=='00000000':
1883                parts.pop()
1884        parts.append(part)
1885    return tuple(parts)
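
# Examples of the resulting ordering (illustrative; each comparison is True)::
#
#     parse_version('2.4.0') == parse_version('2.4')
#     parse_version('2.4a1') < parse_version('2.4c1') < parse_version('2.4')
#     parse_version('2.4')   < parse_version('2.4-1') < parse_version('2.4.1')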
1886
1887class EntryPoint(object):
1888    """Object representing an advertised importable object"""
1889
1890    def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
1891        if not MODULE(module_name):
1892            raise ValueError("Invalid module name", module_name)
1893        self.name = name
1894        self.module_name = module_name
1895        self.attrs = tuple(attrs)
1896        self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras
1897        self.dist = dist
1898
1899    def __str__(self):
1900        s = "%s = %s" % (self.name, self.module_name)
1901        if self.attrs:
1902            s += ':' + '.'.join(self.attrs)
1903        if self.extras:
1904            s += ' [%s]' % ','.join(self.extras)
1905        return s
1906
1907    def __repr__(self):
1908        return "EntryPoint.parse(%r)" % str(self)
1909
1910    def load(self, require=True, env=None, installer=None):
1911        if require: self.require(env, installer)
1912        entry = __import__(self.module_name, globals(),globals(), ['__name__'])
1913        for attr in self.attrs:
1914            try:
1915                entry = getattr(entry,attr)
1916            except AttributeError:
1917                raise ImportError("%r has no %r attribute" % (entry,attr))
1918        return entry
1919
1920    def require(self, env=None, installer=None):
1921        if self.extras and not self.dist:
1922            raise UnknownExtra("Can't require() without a distribution", self)
1923        map(working_set.add,
1924            working_set.resolve(self.dist.requires(self.extras),env,installer))
1925
1926
1927
1928    #@classmethod
1929    def parse(cls, src, dist=None):
1930        """Parse a single entry point from string `src`
1931
1932        Entry point syntax follows the form::
1933
1934            name = some.module:some.attr [extra1,extra2]
1935
1936        The entry name and module name are required, but the ``:attrs`` and
        ``[extras]`` parts are optional.
1938        """
1939        try:
1940            attrs = extras = ()
1941            name,value = src.split('=',1)
1942            if '[' in value:
1943                value,extras = value.split('[',1)
1944                req = Requirement.parse("x["+extras)
1945                if req.specs: raise ValueError
1946                extras = req.extras
1947            if ':' in value:
1948                value,attrs = value.split(':',1)
1949                if not MODULE(attrs.rstrip()):
1950                    raise ValueError
1951                attrs = attrs.rstrip().split('.')
1952        except ValueError:
1953            raise ValueError(
1954                "EntryPoint must be in 'name=module:attrs [extras]' format",
1955                src
1956            )
1957        else:
1958            return cls(name.strip(), value.strip(), attrs, extras, dist)
1959
1960    parse = classmethod(parse)
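
    # Example (illustrative; the module, attribute, and extra names are
    # hypothetical)::
    #
    #     ep = EntryPoint.parse("foo = my.module:MyClass.factory [extra1]")
    #     ep.name         ==  'foo'
    #     ep.module_name  ==  'my.module'
    #     ep.attrs        ==  ('MyClass', 'factory')
    #     ep.extras       ==  ('extra1',)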
1961
1962
1963
1964
1965
1966
1967
1968
1969    #@classmethod
1970    def parse_group(cls, group, lines, dist=None):
1971        """Parse an entry point group"""
1972        if not MODULE(group):
1973            raise ValueError("Invalid group name", group)
1974        this = {}
1975        for line in yield_lines(lines):
1976            ep = cls.parse(line, dist)
1977            if ep.name in this:
1978                raise ValueError("Duplicate entry point", group, ep.name)
1979            this[ep.name]=ep
1980        return this
1981
1982    parse_group = classmethod(parse_group)
1983
1984    #@classmethod
1985    def parse_map(cls, data, dist=None):
1986        """Parse a map of entry point groups"""
1987        if isinstance(data,dict):
1988            data = data.items()
1989        else:
1990            data = split_sections(data)
1991        maps = {}
1992        for group, lines in data:
1993            if group is None:
1994                if not lines:
1995                    continue
1996                raise ValueError("Entry points must be listed in groups")
1997            group = group.strip()
1998            if group in maps:
1999                raise ValueError("Duplicate group name", group)
2000            maps[group] = cls.parse_group(group, lines, dist)
2001        return maps
2002
2003    parse_map = classmethod(parse_map)
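
    # Example (illustrative; ``mypkg.cli:main`` is a hypothetical entry point)::
    #
    #     EntryPoint.parse_map("[console_scripts]\nmytool = mypkg.cli:main")
    #
    # returns ``{'console_scripts': {'mytool': <EntryPoint>}}``, i.e. a dict
    # mapping each group name to a dict of entry point names and EntryPoints.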
2004
2005
2006
2007
2008
2009
2010class Distribution(object):
2011    """Wrap an actual or potential sys.path entry w/metadata"""
2012    def __init__(self,
2013        location=None, metadata=None, project_name=None, version=None,
2014        py_version=PY_MAJOR, platform=None, precedence = EGG_DIST
2015    ):
2016        self.project_name = safe_name(project_name or 'Unknown')
2017        if version is not None:
2018            self._version = safe_version(version)
2019        self.py_version = py_version
2020        self.platform = platform
2021        self.location = location
2022        self.precedence = precedence
2023        self._provider = metadata or empty_provider
2024
2025    #@classmethod
2026    def from_location(cls,location,basename,metadata=None,**kw):
2027        project_name, version, py_version, platform = [None]*4
2028        basename, ext = os.path.splitext(basename)
2029        if ext.lower() in (".egg",".egg-info"):
2030            match = EGG_NAME(basename)
2031            if match:
2032                project_name, version, py_version, platform = match.group(
2033                    'name','ver','pyver','plat'
2034                )
2035        return cls(
2036            location, metadata, project_name=project_name, version=version,
2037            py_version=py_version, platform=platform, **kw
2038        )
2039    from_location = classmethod(from_location)
2040
2041    hashcmp = property(
2042        lambda self: (
2043            getattr(self,'parsed_version',()), self.precedence, self.key,
2044            -len(self.location or ''), self.location, self.py_version,
2045            self.platform
2046        )
2047    )
2048    def __cmp__(self, other): return cmp(self.hashcmp, other)
2049    def __hash__(self): return hash(self.hashcmp)
2050
2051    # These properties have to be lazy so that we don't have to load any
2052    # metadata until/unless it's actually needed.  (i.e., some distributions
2053    # may not know their name or version without loading PKG-INFO)
2054
2055    #@property
2056    def key(self):
2057        try:
2058            return self._key
2059        except AttributeError:
2060            self._key = key = self.project_name.lower()
2061            return key
2062    key = property(key)
2063
2064    #@property
2065    def parsed_version(self):
2066        try:
2067            return self._parsed_version
2068        except AttributeError:
2069            self._parsed_version = pv = parse_version(self.version)
2070            return pv
2071
2072    parsed_version = property(parsed_version)
2073
2074    #@property
2075    def version(self):
2076        try:
2077            return self._version
2078        except AttributeError:
2079            for line in self._get_metadata('PKG-INFO'):
2080                if line.lower().startswith('version:'):
2081                    self._version = safe_version(line.split(':',1)[1].strip())
2082                    return self._version
2083            else:
2084                raise ValueError(
2085                    "Missing 'Version:' header and/or PKG-INFO file", self
2086                )
2087    version = property(version)
2088
2089
2090
2091
2092    #@property
2093    def _dep_map(self):
2094        try:
2095            return self.__dep_map
2096        except AttributeError:
2097            dm = self.__dep_map = {None: []}
2098            for name in 'requires.txt', 'depends.txt':
2099                for extra,reqs in split_sections(self._get_metadata(name)):
2100                    if extra: extra = safe_extra(extra)
2101                    dm.setdefault(extra,[]).extend(parse_requirements(reqs))
2102            return dm
2103    _dep_map = property(_dep_map)
2104
2105    def requires(self,extras=()):
2106        """List of Requirements needed for this distro if `extras` are used"""
2107        dm = self._dep_map
2108        deps = []
2109        deps.extend(dm.get(None,()))
2110        for ext in extras:
2111            try:
2112                deps.extend(dm[safe_extra(ext)])
2113            except KeyError:
2114                raise UnknownExtra(
2115                    "%s has no such extra feature %r" % (self, ext)
2116                )
2117        return deps
2118
2119    def _get_metadata(self,name):
2120        if self.has_metadata(name):
2121            for line in self.get_metadata_lines(name):
2122                yield line
2123
2124    def activate(self,path=None):
2125        """Ensure distribution is importable on `path` (default=sys.path)"""
2126        if path is None: path = sys.path
2127        self.insert_on(path)
2128        if path is sys.path:
2129            fixup_namespace_packages(self.location)
2130            map(declare_namespace, self._get_metadata('namespace_packages.txt'))
2131
2132
2133    def egg_name(self):
2134        """Return what this distribution's standard .egg filename should be"""
2135        filename = "%s-%s-py%s" % (
2136            to_filename(self.project_name), to_filename(self.version),
2137            self.py_version or PY_MAJOR
2138        )
2139
2140        if self.platform:
2141            filename += '-'+self.platform
2142        return filename
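
    # Example (illustrative): a distribution with project_name "FooBar",
    # version "1.2", py_version "2.4", and no platform has
    # ``egg_name() == 'FooBar-1.2-py2.4'``; callers append the ``.egg``
    # extension as needed.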
2143
2144    def __repr__(self):
2145        if self.location:
2146            return "%s (%s)" % (self,self.location)
2147        else:
2148            return str(self)
2149
2150    def __str__(self):
2151        try: version = getattr(self,'version',None)
2152        except ValueError: version = None
2153        version = version or "[unknown version]"
2154        return "%s %s" % (self.project_name,version)
2155
2156    def __getattr__(self,attr):
2157        """Delegate all unrecognized public attributes to .metadata provider"""
2158        if attr.startswith('_'):
            raise AttributeError(attr)
2160        return getattr(self._provider, attr)
2161
2162    #@classmethod
2163    def from_filename(cls,filename,metadata=None, **kw):
2164        return cls.from_location(
2165            _normalize_cached(filename), os.path.basename(filename), metadata,
2166            **kw
2167        )
2168    from_filename = classmethod(from_filename)
2169
2170    def as_requirement(self):
2171        """Return a ``Requirement`` that matches this distribution exactly"""
2172        return Requirement.parse('%s==%s' % (self.project_name, self.version))
2173
2174    def load_entry_point(self, group, name):
2175        """Return the `name` entry point of `group` or raise ImportError"""
2176        ep = self.get_entry_info(group,name)
2177        if ep is None:
2178            raise ImportError("Entry point %r not found" % ((group,name),))
2179        return ep.load()
2180
2181    def get_entry_map(self, group=None):
2182        """Return the entry point map for `group`, or the full entry map"""
2183        try:
2184            ep_map = self._ep_map
2185        except AttributeError:
2186            ep_map = self._ep_map = EntryPoint.parse_map(
2187                self._get_metadata('entry_points.txt'), self
2188            )
2189        if group is not None:
2190            return ep_map.get(group,{})
2191        return ep_map
2192
2193    def get_entry_info(self, group, name):
2194        """Return the EntryPoint object for `group`+`name`, or ``None``"""
2195        return self.get_entry_map(group).get(name)
2196
2197
2198
2199
2200
2201
2202
2203
2204
2205
2206
2207
2208
2209
2210
2211
2212
2213
2214
2215    def insert_on(self, path, loc = None):
2216        """Insert self.location in path before its nearest parent directory"""
2217
2218        loc = loc or self.location
2219        if not loc:
2220            return
2221
2222        if path is sys.path:
2223            self.check_version_conflict()
2224
2225        nloc = _normalize_cached(loc)
2226        bdir = os.path.dirname(nloc)
2227        npath= map(_normalize_cached, path)
2228
2229        bp = None
2230        for p, item in enumerate(npath):
2231            if item==nloc:
2232                break
2233            elif item==bdir and self.precedence==EGG_DIST:
2234                # if it's an .egg, give it precedence over its directory
2235                path.insert(p, loc)
2236                npath.insert(p, nloc)
2237                break
2238        else:
2239            path.append(loc)
2240            return
2241
2242        # p is the spot where we found or inserted loc; now remove duplicates
2243        while 1:
2244            try:
2245                np = npath.index(nloc, p+1)
2246            except ValueError:
2247                break
2248            else:
2249                del npath[np], path[np]
2250                p = np  # ha!
2251
2252        return
2253
2254
2255
2256    def check_version_conflict(self):
2257        if self.key=='setuptools':
2258            return      # ignore the inevitable setuptools self-conflicts  :(
2259
2260        nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
2261        loc = normalize_path(self.location)
2262        for modname in self._get_metadata('top_level.txt'):
2263            if (modname not in sys.modules or modname in nsp
2264                or modname in _namespace_packages
2265            ):
2266                continue
2267
2268            fn = getattr(sys.modules[modname], '__file__', None)
2269            if fn and normalize_path(fn).startswith(loc):
2270                continue
2271            issue_warning(
2272                "Module %s was already imported from %s, but %s is being added"
2273                " to sys.path" % (modname, fn, self.location),
2274            )
2275
2276    def has_version(self):
2277        try:
2278            self.version
2279        except ValueError:
2280            issue_warning("Unbuilt egg for "+repr(self))
2281            return False
2282        return True
2283
2284    def clone(self,**kw):
2285        """Copy this distribution, substituting in any changed keyword args"""
2286        for attr in (
2287            'project_name', 'version', 'py_version', 'platform', 'location',
2288            'precedence'
2289        ):
2290            kw.setdefault(attr, getattr(self,attr,None))
2291        kw.setdefault('metadata', self._provider)
2292        return self.__class__(**kw)
2293
2294
2295
2296
2297    #@property
2298    def extras(self):
2299        return [dep for dep in self._dep_map if dep]
2300    extras = property(extras)
2301
2302
2303def issue_warning(*args,**kw):
2304    level = 1
2305    g = globals()
2306    try:
2307        # find the first stack frame that is *not* code in
2308        # the pkg_resources module, to use for the warning
2309        while sys._getframe(level).f_globals is g:
2310            level += 1
2311    except ValueError:
2312        pass
2313    from warnings import warn
2314    warn(stacklevel = level+1, *args, **kw)
2315
2316
2317
2318
2319
2320
2321
2322
2323
2324
2325
2326
2327
2328
2329
2330
2331
2332
2333
2334
2335
2336
2337
2338def parse_requirements(strs):
2339    """Yield ``Requirement`` objects for each specification in `strs`
2340
2341    `strs` must be an instance of ``basestring``, or a (possibly-nested)
2342    iterable thereof.
2343    """
2344    # create a steppable iterator, so we can handle \-continuations
2345    lines = iter(yield_lines(strs))
2346
2347    def scan_list(ITEM,TERMINATOR,line,p,groups,item_name):
2348
2349        items = []
2350
2351        while not TERMINATOR(line,p):
2352            if CONTINUE(line,p):
2353                try:
2354                    line = lines.next(); p = 0
2355                except StopIteration:
2356                    raise ValueError(
2357                        "\\ must not appear on the last nonblank line"
2358                    )
2359
2360            match = ITEM(line,p)
2361            if not match:
2362                raise ValueError("Expected "+item_name+" in",line,"at",line[p:])
2363
2364            items.append(match.group(*groups))
2365            p = match.end()
2366
2367            match = COMMA(line,p)
2368            if match:
2369                p = match.end() # skip the comma
2370            elif not TERMINATOR(line,p):
2371                raise ValueError(
2372                    "Expected ',' or end-of-list in",line,"at",line[p:]
2373                )
2374
2375        match = TERMINATOR(line,p)
2376        if match: p = match.end()   # skip the terminator, if any
2377        return line, p, items
2378
2379    for line in lines:
2380        match = DISTRO(line)
2381        if not match:
2382            raise ValueError("Missing distribution spec", line)
2383        project_name = match.group(1)
2384        p = match.end()
2385        extras = []
2386
2387        match = OBRACKET(line,p)
2388        if match:
2389            p = match.end()
2390            line, p, extras = scan_list(
2391                DISTRO, CBRACKET, line, p, (1,), "'extra' name"
2392            )
2393
2394        line, p, specs = scan_list(VERSION,LINE_END,line,p,(1,2),"version spec")
2395        specs = [(op,safe_version(val)) for op,val in specs]
2396        yield Requirement(project_name, specs, extras)
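
# Example (illustrative; the project and extra names are hypothetical)::
#
#     reqs = list(parse_requirements("FooBar>=1.2,<2.0\nBazPkg[extra1]"))
#     map(str, reqs)  ==  ['FooBar>=1.2,<2.0', 'BazPkg[extra1]']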
2397
2398
2399def _sort_dists(dists):
2400    tmp = [(dist.hashcmp,dist) for dist in dists]
2401    tmp.sort()
2402    dists[::-1] = [d for hc,d in tmp]
2403
2404
2405
2406
2407
2408
2409
2410
2411
2412
2413
2414
2415
2416
2417
2418
2419
2420class Requirement:
2421    def __init__(self, project_name, specs, extras):
2422        """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
2423        self.unsafe_name, project_name = project_name, safe_name(project_name)
2424        self.project_name, self.key = project_name, project_name.lower()
2425        index = [(parse_version(v),state_machine[op],op,v) for op,v in specs]
2426        index.sort()
2427        self.specs = [(op,ver) for parsed,trans,op,ver in index]
2428        self.index, self.extras = index, tuple(map(safe_extra,extras))
2429        self.hashCmp = (
2430            self.key, tuple([(op,parsed) for parsed,trans,op,ver in index]),
2431            frozenset(self.extras)
2432        )
2433        self.__hash = hash(self.hashCmp)
2434
2435    def __str__(self):
2436        specs = ','.join([''.join(s) for s in self.specs])
2437        extras = ','.join(self.extras)
2438        if extras: extras = '[%s]' % extras
2439        return '%s%s%s' % (self.project_name, extras, specs)
2440
2441    def __eq__(self,other):
2442        return isinstance(other,Requirement) and self.hashCmp==other.hashCmp
2443
2444    def __contains__(self,item):
2445        if isinstance(item,Distribution):
            if item.key != self.key: return False
2447            if self.index: item = item.parsed_version  # only get if we need it
2448        elif isinstance(item,basestring):
2449            item = parse_version(item)
2450        last = None
2451        for parsed,trans,op,ver in self.index:
2452            action = trans[cmp(item,parsed)]
2453            if action=='F':     return False
2454            elif action=='T':   return True
2455            elif action=='+':   last = True
2456            elif action=='-' or last is None:   last = False
2457        if last is None: last = True    # no rules encountered
2458        return last
2459
2460
2461    def __hash__(self):
2462        return self.__hash
2463
2464    def __repr__(self): return "Requirement.parse(%r)" % str(self)
2465
2466    #@staticmethod
2467    def parse(s):
2468        reqs = list(parse_requirements(s))
2469        if reqs:
2470            if len(reqs)==1:
2471                return reqs[0]
2472            raise ValueError("Expected only one requirement", s)
2473        raise ValueError("No requirements found", s)
2474
2475    parse = staticmethod(parse)
2476
2477state_machine = {
2478    #       =><
2479    '<' :  '--T',
2480    '<=':  'T-T',
2481    '>' :  'F+F',
2482    '>=':  'T+F',
2483    '==':  'T..',
2484    '!=':  'F++',
2485}
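
# How to read the table: for each (parsed, trans, op, ver) rule in a
# Requirement's index, ``Requirement.__contains__`` looks up
# ``trans[cmp(candidate, parsed)]``: character 0 applies when the candidate
# equals the rule's version, character 1 when it is greater, and character -1
# when it is less.  'T' and 'F' answer immediately, '+' and '-' record a
# tentative answer, and '.' leaves any tentative answer unchanged (recording
# False if there is none yet).
#
# Example (illustrative; ``FooBar`` is a hypothetical project)::
#
#     req = Requirement.parse("FooBar>=1.2,<2.0")
#     '1.5' in req    # True
#     '2.5' in req    # False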
2486
2487
2488def _get_mro(cls):
2489    """Get an mro for a type or classic class"""
2490    if not isinstance(cls,type):
2491        class cls(cls,object): pass
2492        return cls.__mro__[1:]
2493    return cls.__mro__
2494
2495def _find_adapter(registry, ob):
2496    """Return an adapter factory for `ob` from `registry`"""
2497    for t in _get_mro(getattr(ob, '__class__', type(ob))):
2498        if t in registry:
2499            return registry[t]
2500
2501
2502def ensure_directory(path):
2503    """Ensure that the parent directory of `path` exists"""
2504    dirname = os.path.dirname(path)
2505    if not os.path.isdir(dirname):
2506        os.makedirs(dirname)
2507
2508def split_sections(s):
2509    """Split a string or iterable thereof into (section,content) pairs
2510
2511    Each ``section`` is a stripped version of the section header ("[section]")
2512    and each ``content`` is a list of stripped lines excluding blank lines and
2513    comment-only lines.  If there are any such lines before the first section
2514    header, they're returned in a first ``section`` of ``None``.
2515    """
2516    section = None
2517    content = []
2518    for line in yield_lines(s):
2519        if line.startswith("["):
2520            if line.endswith("]"):
2521                if section or content:
2522                    yield section, content
2523                section = line[1:-1].strip()
2524                content = []
2525            else:
2526                raise ValueError("Invalid section heading", line)
2527        else:
2528            content.append(line)
2529
2530    # wrap up last segment
2531    yield section, content
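
# Example (illustrative, using a requires.txt-style string)::
#
#     list(split_sections("docutils>=0.3\n[reST]\nroman"))
#         ==  [(None, ['docutils>=0.3']), ('reST', ['roman'])]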
2532
2533def _mkstemp(*args,**kw):
2534    from tempfile import mkstemp
2535    old_open = os.open
2536    try:
2537        os.open = os_open   # temporarily bypass sandboxing
2538        return mkstemp(*args,**kw)
2539    finally:
2540        os.open = old_open  # and then put it back
2541
2542
2543# Set up global resource manager
2544_manager = ResourceManager()
2545def _initialize(g):
2546    for name in dir(_manager):
2547        if not name.startswith('_'):
2548            g[name] = getattr(_manager, name)
2549_initialize(globals())
2550
2551# Prepare the master working set and make the ``require()`` API available
2552working_set = WorkingSet()
2553try:
2554    # Does the main program list any requirements?
2555    from __main__ import __requires__
2556except ImportError:
2557    pass # No: just use the default working set based on sys.path
2558else:
2559    # Yes: ensure the requirements are met, by prefixing sys.path if necessary
2560    try:
2561        working_set.require(__requires__)
2562    except VersionConflict:     # try it without defaults already on sys.path
2563        working_set = WorkingSet([])    # by starting with an empty path
2564        for dist in working_set.resolve(
2565            parse_requirements(__requires__), Environment()
2566        ):
2567            working_set.add(dist)
2568        for entry in sys.path:  # add any missing entries from sys.path
2569            if entry not in working_set.entries:
2570                working_set.add_entry(entry)
2571        sys.path[:] = working_set.entries   # then copy back to sys.path
2572
2573require = working_set.require
2574iter_entry_points = working_set.iter_entry_points
2575add_activation_listener = working_set.subscribe
2576run_script = working_set.run_script
2577run_main = run_script   # backward compatibility
2578# Activate all distributions already on sys.path, and ensure that
2579# all distributions added to the working set in the future (e.g. by
2580# calling ``require()``) will get activated as well.
2581add_activation_listener(lambda dist: dist.activate())
2582working_set.entries=[]; map(working_set.add_entry,sys.path) # match order
2583
2584