1# coding: utf-8 2""" 3Package resource API 4-------------------- 5 6A resource is a logical file contained within a package, or a logical 7subdirectory thereof. The package resource API expects resource names 8to have their path parts separated with ``/``, *not* whatever the local 9path separator is. Do not use os.path operations to manipulate resource 10names being passed into the API. 11 12The package resource API is designed to work with normal filesystem packages, 13.egg files, and unpacked .egg files. It can also work in a limited way with 14.zip files and with custom PEP 302 loaders that support the ``get_data()`` 15method. 16""" 17 18from __future__ import absolute_import 19 20import sys 21import os 22import io 23import time 24import re 25import types 26import zipfile 27import zipimport 28import warnings 29import stat 30import functools 31import pkgutil 32import operator 33import platform 34import collections 35import plistlib 36import email.parser 37import errno 38import tempfile 39import textwrap 40import itertools 41import inspect 42import ntpath 43import posixpath 44from pkgutil import get_importer 45 46try: 47 import _imp 48except ImportError: 49 # Python 3.2 compatibility 50 import imp as _imp 51 52try: 53 FileExistsError 54except NameError: 55 FileExistsError = OSError 56 57from pkg_resources.extern import six 58from pkg_resources.extern.six.moves import urllib, map, filter 59 60# capture these to bypass sandboxing 61from os import utime 62try: 63 from os import mkdir, rename, unlink 64 WRITE_SUPPORT = True 65except ImportError: 66 # no write support, probably under GAE 67 WRITE_SUPPORT = False 68 69from os import open as os_open 70from os.path import isdir, split 71 72try: 73 import importlib.machinery as importlib_machinery 74 # access attribute to force import under delayed import mechanisms. 75 importlib_machinery.__name__ 76except ImportError: 77 importlib_machinery = None 78 79from . 
import py31compat
from pkg_resources.extern import appdirs
from pkg_resources.extern import packaging
# Eagerly import the vendored packaging submodules so attribute access
# such as ``packaging.version`` below is guaranteed to work.
__import__('pkg_resources.extern.packaging.version')
__import__('pkg_resources.extern.packaging.specifiers')
__import__('pkg_resources.extern.packaging.requirements')
__import__('pkg_resources.extern.packaging.markers')


# Make classes defined in this module new-style under Python 2.
__metaclass__ = type


# Python 2 is still supported; of the Python 3 line, only 3.4+ is.
if (3, 0) < sys.version_info < (3, 4):
    raise RuntimeError("Python 3.4 or later is required")

if six.PY2:
    # Those builtin exceptions are only defined in Python 3
    PermissionError = None
    NotADirectoryError = None

# declare some globals that will be defined later to
# satisfy the linters.
require = None
working_set = None
add_activation_listener = None
resources_stream = None
cleanup_resources = None
resource_dir = None
resource_stream = None
set_extraction_path = None
resource_isdir = None
resource_string = None
iter_entry_points = None
resource_listdir = None
resource_filename = None
resource_exists = None
_distribution_finders = None
_namespace_handlers = None
_namespace_packages = None


class PEP440Warning(RuntimeWarning):
    """
    Used when there is an issue with a version or specifier not complying with
    PEP 440.
124 """ 125 126 127def parse_version(v): 128 try: 129 return packaging.version.Version(v) 130 except packaging.version.InvalidVersion: 131 return packaging.version.LegacyVersion(v) 132 133 134_state_vars = {} 135 136 137def _declare_state(vartype, **kw): 138 globals().update(kw) 139 _state_vars.update(dict.fromkeys(kw, vartype)) 140 141 142def __getstate__(): 143 state = {} 144 g = globals() 145 for k, v in _state_vars.items(): 146 state[k] = g['_sget_' + v](g[k]) 147 return state 148 149 150def __setstate__(state): 151 g = globals() 152 for k, v in state.items(): 153 g['_sset_' + _state_vars[k]](k, g[k], v) 154 return state 155 156 157def _sget_dict(val): 158 return val.copy() 159 160 161def _sset_dict(key, ob, state): 162 ob.clear() 163 ob.update(state) 164 165 166def _sget_object(val): 167 return val.__getstate__() 168 169 170def _sset_object(key, ob, state): 171 ob.__setstate__(state) 172 173 174_sget_none = _sset_none = lambda *args: None 175 176 177def get_supported_platform(): 178 """Return this platform's maximum compatible version. 179 180 distutils.util.get_platform() normally reports the minimum version 181 of Mac OS X that would be required to *use* extensions produced by 182 distutils. But what we want when checking compatibility is to know the 183 version of Mac OS X that we are *running*. To allow usage of packages that 184 explicitly require a newer version of Mac OS X, we must also know the 185 current version of the OS. 186 187 If this condition occurs for any other platform with a version in its 188 platform strings, this function should be extended accordingly. 
189 """ 190 plat = get_build_platform() 191 m = macosVersionString.match(plat) 192 if m is not None and sys.platform == "darwin": 193 try: 194 plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3)) 195 except ValueError: 196 # not Mac OS X 197 pass 198 return plat 199 200 201__all__ = [ 202 # Basic resource access and distribution/entry point discovery 203 'require', 'run_script', 'get_provider', 'get_distribution', 204 'load_entry_point', 'get_entry_map', 'get_entry_info', 205 'iter_entry_points', 206 'resource_string', 'resource_stream', 'resource_filename', 207 'resource_listdir', 'resource_exists', 'resource_isdir', 208 209 # Environmental control 210 'declare_namespace', 'working_set', 'add_activation_listener', 211 'find_distributions', 'set_extraction_path', 'cleanup_resources', 212 'get_default_cache', 213 214 # Primary implementation classes 215 'Environment', 'WorkingSet', 'ResourceManager', 216 'Distribution', 'Requirement', 'EntryPoint', 217 218 # Exceptions 219 'ResolutionError', 'VersionConflict', 'DistributionNotFound', 220 'UnknownExtra', 'ExtractionError', 221 222 # Warnings 223 'PEP440Warning', 224 225 # Parsing functions and string utilities 226 'parse_requirements', 'parse_version', 'safe_name', 'safe_version', 227 'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections', 228 'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker', 229 230 # filesystem utilities 231 'ensure_directory', 'normalize_path', 232 233 # Distribution "precedence" constants 234 'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST', 235 236 # "Provider" interfaces, implementations, and registration/lookup APIs 237 'IMetadataProvider', 'IResourceProvider', 'FileMetadata', 238 'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider', 239 'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider', 240 'register_finder', 'register_namespace_handler', 'register_loader_type', 241 'fixup_namespace_packages', 
'get_importer', 242 243 # Warnings 244 'PkgResourcesDeprecationWarning', 245 246 # Deprecated/backward compatibility only 247 'run_main', 'AvailableDistributions', 248] 249 250 251class ResolutionError(Exception): 252 """Abstract base for dependency resolution errors""" 253 254 def __repr__(self): 255 return self.__class__.__name__ + repr(self.args) 256 257 258class VersionConflict(ResolutionError): 259 """ 260 An already-installed version conflicts with the requested version. 261 262 Should be initialized with the installed Distribution and the requested 263 Requirement. 264 """ 265 266 _template = "{self.dist} is installed but {self.req} is required" 267 268 @property 269 def dist(self): 270 return self.args[0] 271 272 @property 273 def req(self): 274 return self.args[1] 275 276 def report(self): 277 return self._template.format(**locals()) 278 279 def with_context(self, required_by): 280 """ 281 If required_by is non-empty, return a version of self that is a 282 ContextualVersionConflict. 283 """ 284 if not required_by: 285 return self 286 args = self.args + (required_by,) 287 return ContextualVersionConflict(*args) 288 289 290class ContextualVersionConflict(VersionConflict): 291 """ 292 A VersionConflict that accepts a third parameter, the set of the 293 requirements that required the installed Distribution. 
294 """ 295 296 _template = VersionConflict._template + ' by {self.required_by}' 297 298 @property 299 def required_by(self): 300 return self.args[2] 301 302 303class DistributionNotFound(ResolutionError): 304 """A requested distribution was not found""" 305 306 _template = ("The '{self.req}' distribution was not found " 307 "and is required by {self.requirers_str}") 308 309 @property 310 def req(self): 311 return self.args[0] 312 313 @property 314 def requirers(self): 315 return self.args[1] 316 317 @property 318 def requirers_str(self): 319 if not self.requirers: 320 return 'the application' 321 return ', '.join(self.requirers) 322 323 def report(self): 324 return self._template.format(**locals()) 325 326 def __str__(self): 327 return self.report() 328 329 330class UnknownExtra(ResolutionError): 331 """Distribution doesn't have an "extra feature" of the given name""" 332 333 334_provider_factories = {} 335 336PY_MAJOR = sys.version[:3] 337EGG_DIST = 3 338BINARY_DIST = 2 339SOURCE_DIST = 1 340CHECKOUT_DIST = 0 341DEVELOP_DIST = -1 342 343 344def register_loader_type(loader_type, provider_factory): 345 """Register `provider_factory` to make providers for `loader_type` 346 347 `loader_type` is the type or class of a PEP 302 ``module.__loader__``, 348 and `provider_factory` is a function that, passed a *module* object, 349 returns an ``IResourceProvider`` for that module. 
350 """ 351 _provider_factories[loader_type] = provider_factory 352 353 354def get_provider(moduleOrReq): 355 """Return an IResourceProvider for the named module or requirement""" 356 if isinstance(moduleOrReq, Requirement): 357 return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0] 358 try: 359 module = sys.modules[moduleOrReq] 360 except KeyError: 361 __import__(moduleOrReq) 362 module = sys.modules[moduleOrReq] 363 loader = getattr(module, '__loader__', None) 364 return _find_adapter(_provider_factories, loader)(module) 365 366 367def _macosx_vers(_cache=[]): 368 if not _cache: 369 version = platform.mac_ver()[0] 370 # fallback for MacPorts 371 if version == '': 372 plist = '/System/Library/CoreServices/SystemVersion.plist' 373 if os.path.exists(plist): 374 if hasattr(plistlib, 'readPlist'): 375 plist_content = plistlib.readPlist(plist) 376 if 'ProductVersion' in plist_content: 377 version = plist_content['ProductVersion'] 378 379 _cache.append(version.split('.')) 380 return _cache[0] 381 382 383def _macosx_arch(machine): 384 return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine) 385 386 387def get_build_platform(): 388 """Return this platform's string for platform-specific distributions 389 390 XXX Currently this is the same as ``distutils.util.get_platform()``, but it 391 needs some hacks for Linux and Mac OS X. 
392 """ 393 from sysconfig import get_platform 394 395 plat = get_platform() 396 if sys.platform == "darwin" and not plat.startswith('macosx-'): 397 try: 398 version = _macosx_vers() 399 machine = os.uname()[4].replace(" ", "_") 400 return "macosx-%d.%d-%s" % ( 401 int(version[0]), int(version[1]), 402 _macosx_arch(machine), 403 ) 404 except ValueError: 405 # if someone is running a non-Mac darwin system, this will fall 406 # through to the default implementation 407 pass 408 return plat 409 410 411macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)") 412darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)") 413# XXX backward compat 414get_platform = get_build_platform 415 416 417def compatible_platforms(provided, required): 418 """Can code for the `provided` platform run on the `required` platform? 419 420 Returns true if either platform is ``None``, or the platforms are equal. 421 422 XXX Needs compatibility checks for Linux and other unixy OSes. 423 """ 424 if provided is None or required is None or provided == required: 425 # easy case 426 return True 427 428 # Mac OS X special cases 429 reqMac = macosVersionString.match(required) 430 if reqMac: 431 provMac = macosVersionString.match(provided) 432 433 # is this a Mac package? 434 if not provMac: 435 # this is backwards compatibility for packages built before 436 # setuptools 0.6. All packages built after this point will 437 # use the new macosx designation. 438 provDarwin = darwinVersionString.match(provided) 439 if provDarwin: 440 dversion = int(provDarwin.group(1)) 441 macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2)) 442 if dversion == 7 and macosversion >= "10.3" or \ 443 dversion == 8 and macosversion >= "10.4": 444 return True 445 # egg isn't macosx or legacy darwin 446 return False 447 448 # are they the same major version and machine type? 
        if provMac.group(1) != reqMac.group(1) or \
                provMac.group(3) != reqMac.group(3):
            return False

        # is the required OS major update >= the provided one?
        if int(provMac.group(2)) > int(reqMac.group(2)):
            return False

        return True

    # XXX Linux and other platforms' special cases should go here
    return False


def run_script(dist_spec, script_name):
    """Locate distribution `dist_spec` and run its `script_name` script"""
    # NOTE: clobbers the *caller's* global namespace (keeping only
    # __name__) so the script runs as if it were the main module.
    ns = sys._getframe(1).f_globals
    name = ns['__name__']
    ns.clear()
    ns['__name__'] = name
    require(dist_spec)[0].run_script(script_name, ns)


# backward compatibility
run_main = run_script


def get_distribution(dist):
    """Return a current distribution object for a Requirement or string"""
    # Coerce string -> Requirement -> Distribution, step by step.
    if isinstance(dist, six.string_types):
        dist = Requirement.parse(dist)
    if isinstance(dist, Requirement):
        dist = get_provider(dist)
    if not isinstance(dist, Distribution):
        raise TypeError("Expected string, Requirement, or Distribution", dist)
    return dist


def load_entry_point(dist, group, name):
    """Return `name` entry point of `group` for `dist` or raise ImportError"""
    return get_distribution(dist).load_entry_point(group, name)


def get_entry_map(dist, group=None):
    """Return the entry point map for `group`, or the full entry map"""
    return get_distribution(dist).get_entry_map(group)


def get_entry_info(dist, group, name):
    """Return the EntryPoint object for `group`+`name`, or ``None``"""
    return get_distribution(dist).get_entry_info(group, name)


class IMetadataProvider:
    # NOTE: interface specification only -- the methods deliberately omit
    # ``self`` and have no bodies; implementations live elsewhere.
    def has_metadata(name):
        """Does the package's distribution contain the named metadata?"""

    def get_metadata(name):
        """The named metadata resource as a string"""

    def get_metadata_lines(name):
        """Yield named metadata resource as list of non-blank non-comment lines

        Leading and trailing
        whitespace is stripped from each line, and lines
        with ``#`` as the first non-blank character are omitted."""

    def metadata_isdir(name):
        """Is the named metadata a directory? (like ``os.path.isdir()``)"""

    def metadata_listdir(name):
        """List of metadata names in the directory (like ``os.listdir()``)"""

    def run_script(script_name, namespace):
        """Execute the named script in the supplied namespace dictionary"""


class IResourceProvider(IMetadataProvider):
    """An object that provides access to package resources"""
    # NOTE: interface specification only -- methods deliberately omit
    # ``self``; see the provider implementations elsewhere in this module.

    def get_resource_filename(manager, resource_name):
        """Return a true filesystem path for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_stream(manager, resource_name):
        """Return a readable file-like object for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_string(manager, resource_name):
        """Return a string containing the contents of `resource_name`

        `manager` must be an ``IResourceManager``"""

    def has_resource(resource_name):
        """Does the package contain the named resource?"""

    def resource_isdir(resource_name):
        """Is the named resource a directory? (like ``os.path.isdir()``)"""

    def resource_listdir(resource_name):
        """List of resource names in the directory (like ``os.listdir()``)"""


class WorkingSet:
    """A collection of active distributions on sys.path (or a similar list)"""

    def __init__(self, entries=None):
        """Create working set from list of path entries (default=sys.path)"""
        # entries: ordered path items; entry_keys: entry -> [dist keys];
        # by_key: dist key -> active Distribution; callbacks: subscribers.
        self.entries = []
        self.entry_keys = {}
        self.by_key = {}
        self.callbacks = []

        if entries is None:
            entries = sys.path

        for entry in entries:
            self.add_entry(entry)

    @classmethod
    def _build_master(cls):
        """
        Prepare the master working set.
573 """ 574 ws = cls() 575 try: 576 from __main__ import __requires__ 577 except ImportError: 578 # The main program does not list any requirements 579 return ws 580 581 # ensure the requirements are met 582 try: 583 ws.require(__requires__) 584 except VersionConflict: 585 return cls._build_from_requirements(__requires__) 586 587 return ws 588 589 @classmethod 590 def _build_from_requirements(cls, req_spec): 591 """ 592 Build a working set from a requirement spec. Rewrites sys.path. 593 """ 594 # try it without defaults already on sys.path 595 # by starting with an empty path 596 ws = cls([]) 597 reqs = parse_requirements(req_spec) 598 dists = ws.resolve(reqs, Environment()) 599 for dist in dists: 600 ws.add(dist) 601 602 # add any missing entries from sys.path 603 for entry in sys.path: 604 if entry not in ws.entries: 605 ws.add_entry(entry) 606 607 # then copy back to sys.path 608 sys.path[:] = ws.entries 609 return ws 610 611 def add_entry(self, entry): 612 """Add a path item to ``.entries``, finding any distributions on it 613 614 ``find_distributions(entry, True)`` is used to find distributions 615 corresponding to the path entry, and they are added. `entry` is 616 always appended to ``.entries``, even if it is already present. 617 (This is because ``sys.path`` can contain the same value more than 618 once, and the ``.entries`` of the ``sys.path`` WorkingSet should always 619 equal ``sys.path``.) 620 """ 621 self.entry_keys.setdefault(entry, []) 622 self.entries.append(entry) 623 for dist in find_distributions(entry, True): 624 self.add(dist, entry, False) 625 626 def __contains__(self, dist): 627 """True if `dist` is the active distribution for its project""" 628 return self.by_key.get(dist.key) == dist 629 630 def find(self, req): 631 """Find a distribution matching requirement `req` 632 633 If there is an active distribution for the requested project, this 634 returns it as long as it meets the version requirement specified by 635 `req`. 
But, if there is an active distribution for the project and it 636 does *not* meet the `req` requirement, ``VersionConflict`` is raised. 637 If there is no active distribution for the requested project, ``None`` 638 is returned. 639 """ 640 dist = self.by_key.get(req.key) 641 if dist is not None and dist not in req: 642 # XXX add more info 643 raise VersionConflict(dist, req) 644 return dist 645 646 def iter_entry_points(self, group, name=None): 647 """Yield entry point objects from `group` matching `name` 648 649 If `name` is None, yields all entry points in `group` from all 650 distributions in the working set, otherwise only ones matching 651 both `group` and `name` are yielded (in distribution order). 652 """ 653 return ( 654 entry 655 for dist in self 656 for entry in dist.get_entry_map(group).values() 657 if name is None or name == entry.name 658 ) 659 660 def run_script(self, requires, script_name): 661 """Locate distribution for `requires` and run `script_name` script""" 662 ns = sys._getframe(1).f_globals 663 name = ns['__name__'] 664 ns.clear() 665 ns['__name__'] = name 666 self.require(requires)[0].run_script(script_name, ns) 667 668 def __iter__(self): 669 """Yield distributions for non-duplicate projects in the working set 670 671 The yield order is the order in which the items' path entries were 672 added to the working set. 673 """ 674 seen = {} 675 for item in self.entries: 676 if item not in self.entry_keys: 677 # workaround a cache issue 678 continue 679 680 for key in self.entry_keys[item]: 681 if key not in seen: 682 seen[key] = 1 683 yield self.by_key[key] 684 685 def add(self, dist, entry=None, insert=True, replace=False): 686 """Add `dist` to working set, associated with `entry` 687 688 If `entry` is unspecified, it defaults to the ``.location`` of `dist`. 689 On exit from this routine, `entry` is added to the end of the working 690 set's ``.entries`` (if it wasn't already present). 

        `dist` is only added to the working set if it's for a project that
        doesn't already have a distribution in the set, unless `replace=True`.
        If it's added, any callbacks registered with the ``subscribe()`` method
        will be called.
        """
        if insert:
            dist.insert_on(self.entries, entry, replace=replace)

        if entry is None:
            entry = dist.location
        # Record the dist's key under both the given entry and its location.
        keys = self.entry_keys.setdefault(entry, [])
        keys2 = self.entry_keys.setdefault(dist.location, [])
        if not replace and dist.key in self.by_key:
            # ignore hidden distros
            return

        self.by_key[dist.key] = dist
        if dist.key not in keys:
            keys.append(dist.key)
        if dist.key not in keys2:
            keys2.append(dist.key)
        # Notify subscribers registered via subscribe().
        self._added_new(dist)

    def resolve(self, requirements, env=None, installer=None,
                replace_conflicting=False, extras=None):
        """List all distributions needed to (recursively) meet `requirements`

        `requirements` must be a sequence of ``Requirement`` objects.  `env`,
        if supplied, should be an ``Environment`` instance.  If
        not supplied, it defaults to all distributions available within any
        entry or distribution in the working set.  `installer`, if supplied,
        will be invoked with each requirement that cannot be met by an
        already-installed distribution; it should return a ``Distribution`` or
        ``None``.

        Unless `replace_conflicting=True`, raises a VersionConflict exception
        if
        any requirements are found on the path that have the correct name but
        the wrong version.  Otherwise, if an `installer` is supplied it will be
        invoked to obtain the correct version of the requirement and activate
        it.

        `extras` is a list of the extras to be used with these requirements.
        This is important because extra requirements may look like `my_req;
        extra = "my_extra"`, which would otherwise be interpreted as a purely
        optional requirement.  Instead, we want to be able to assert that these
        requirements are truly required.
        """

        # set up the stack
        requirements = list(requirements)[::-1]
        # set of processed requirements
        processed = {}
        # key -> dist
        best = {}
        to_activate = []

        req_extras = _ReqExtras()

        # Mapping of requirement to set of distributions that required it;
        # useful for reporting info about conflicts.
        required_by = collections.defaultdict(set)

        while requirements:
            # process dependencies breadth-first
            req = requirements.pop(0)
            if req in processed:
                # Ignore cyclic or redundant dependencies
                continue

            if not req_extras.markers_pass(req, extras):
                # Requirement guarded by an environment marker that does
                # not apply: skip it entirely.
                continue

            dist = best.get(req.key)
            if dist is None:
                # Find the best distribution and add it to the map
                dist = self.by_key.get(req.key)
                if dist is None or (dist not in req and replace_conflicting):
                    ws = self
                    if env is None:
                        if dist is None:
                            env = Environment(self.entries)
                        else:
                            # Use an empty environment and workingset to avoid
                            # any further conflicts with the conflicting
                            # distribution
                            env = Environment([])
                            ws = WorkingSet([])
                    dist = best[req.key] = env.best_match(
                        req, ws, installer,
                        replace_conflicting=replace_conflicting
                    )
                    if dist is None:
                        requirers = required_by.get(req, None)
                        raise DistributionNotFound(req, requirers)
                to_activate.append(dist)
            if dist not in req:
                # Oops, the "best" so far conflicts with a dependency
                dependent_req = required_by[req]
                raise VersionConflict(dist, req).with_context(dependent_req)

            # push the new requirements onto the stack
            new_requirements = dist.requires(req.extras)[::-1]
            requirements.extend(new_requirements)

            # Register the new requirements needed by req
            for new_requirement in new_requirements:
                required_by[new_requirement].add(req.project_name)
                req_extras[new_requirement] = req.extras

            processed[req] = True

        # return list of distros to activate
        return to_activate

    def find_plugins(
            self, plugin_env, full_env=None, installer=None, fallback=True):
        """Find all activatable distributions in `plugin_env`

        Example usage::

            distributions, errors = working_set.find_plugins(
                Environment(plugin_dirlist)
            )
            # add plugins+libs to sys.path
            map(working_set.add, distributions)
            # display errors
            print('Could not load', errors)

        The `plugin_env` should be an ``Environment`` instance that contains
        only distributions that are in the project's "plugin directory" or
        directories. The `full_env`, if supplied, should be an ``Environment``
        contains all currently-available distributions.  If `full_env` is not
        supplied, one is created automatically from the ``WorkingSet`` this
        method is called on, which will typically mean that every directory on
        ``sys.path`` will be scanned for distributions.

        `installer` is a standard installer callback as used by the
        ``resolve()`` method. The `fallback` flag indicates whether we should
        attempt to resolve older versions of a plugin if the newest version
        cannot be resolved.

        This method returns a 2-tuple: (`distributions`, `error_info`), where
        `distributions` is a list of the distributions found in `plugin_env`
        that were loadable, along with any other distributions that are needed
        to resolve their dependencies. `error_info` is a dictionary mapping
        unloadable plugin distributions to an exception instance describing the
        error that occurred. Usually this will be a ``DistributionNotFound`` or
        ``VersionConflict`` instance.
841 """ 842 843 plugin_projects = list(plugin_env) 844 # scan project names in alphabetic order 845 plugin_projects.sort() 846 847 error_info = {} 848 distributions = {} 849 850 if full_env is None: 851 env = Environment(self.entries) 852 env += plugin_env 853 else: 854 env = full_env + plugin_env 855 856 shadow_set = self.__class__([]) 857 # put all our entries in shadow_set 858 list(map(shadow_set.add, self)) 859 860 for project_name in plugin_projects: 861 862 for dist in plugin_env[project_name]: 863 864 req = [dist.as_requirement()] 865 866 try: 867 resolvees = shadow_set.resolve(req, env, installer) 868 869 except ResolutionError as v: 870 # save error info 871 error_info[dist] = v 872 if fallback: 873 # try the next older version of project 874 continue 875 else: 876 # give up on this project, keep going 877 break 878 879 else: 880 list(map(shadow_set.add, resolvees)) 881 distributions.update(dict.fromkeys(resolvees)) 882 883 # success, no need to try any more versions of this project 884 break 885 886 distributions = list(distributions) 887 distributions.sort() 888 889 return distributions, error_info 890 891 def require(self, *requirements): 892 """Ensure that distributions matching `requirements` are activated 893 894 `requirements` must be a string or a (possibly-nested) sequence 895 thereof, specifying the distributions and versions required. The 896 return value is a sequence of the distributions that needed to be 897 activated to fulfill the requirements; all relevant distributions are 898 included, even if they were already activated in this working set. 899 """ 900 needed = self.resolve(parse_requirements(requirements)) 901 902 for dist in needed: 903 self.add(dist) 904 905 return needed 906 907 def subscribe(self, callback, existing=True): 908 """Invoke `callback` for all distributions 909 910 If `existing=True` (default), 911 call on all existing ones, as well. 
912 """ 913 if callback in self.callbacks: 914 return 915 self.callbacks.append(callback) 916 if not existing: 917 return 918 for dist in self: 919 callback(dist) 920 921 def _added_new(self, dist): 922 for callback in self.callbacks: 923 callback(dist) 924 925 def __getstate__(self): 926 return ( 927 self.entries[:], self.entry_keys.copy(), self.by_key.copy(), 928 self.callbacks[:] 929 ) 930 931 def __setstate__(self, e_k_b_c): 932 entries, keys, by_key, callbacks = e_k_b_c 933 self.entries = entries[:] 934 self.entry_keys = keys.copy() 935 self.by_key = by_key.copy() 936 self.callbacks = callbacks[:] 937 938 939class _ReqExtras(dict): 940 """ 941 Map each requirement to the extras that demanded it. 942 """ 943 944 def markers_pass(self, req, extras=None): 945 """ 946 Evaluate markers for req against each extra that 947 demanded it. 948 949 Return False if the req has a marker and fails 950 evaluation. Otherwise, return True. 951 """ 952 extra_evals = ( 953 req.marker.evaluate({'extra': extra}) 954 for extra in self.get(req, ()) + (extras or (None,)) 955 ) 956 return not req.marker or any(extra_evals) 957 958 959class Environment: 960 """Searchable snapshot of distributions on a search path""" 961 962 def __init__( 963 self, search_path=None, platform=get_supported_platform(), 964 python=PY_MAJOR): 965 """Snapshot distributions available on a search path 966 967 Any distributions found on `search_path` are added to the environment. 968 `search_path` should be a sequence of ``sys.path`` items. If not 969 supplied, ``sys.path`` is used. 970 971 `platform` is an optional string specifying the name of the platform 972 that platform-specific distributions must be compatible with. If 973 unspecified, it defaults to the current platform. `python` is an 974 optional string naming the desired version of Python (e.g. ``'3.6'``); 975 it defaults to the current version. 

        You may explicitly set `platform` (and/or `python`) to ``None`` if you
        wish to map *all* distributions, not just those compatible with the
        running platform or Python version.
        """
        self._distmap = {}
        self.platform = platform
        self.python = python
        self.scan(search_path)

    def can_add(self, dist):
        """Is distribution `dist` acceptable for this environment?

        The distribution must match the platform and python version
        requirements specified when this environment was created, or False
        is returned.
        """
        # A None on either side acts as a wildcard.
        py_compat = (
            self.python is None
            or dist.py_version is None
            or dist.py_version == self.python
        )
        return py_compat and compatible_platforms(dist.platform, self.platform)

    def remove(self, dist):
        """Remove `dist` from the environment"""
        self._distmap[dist.key].remove(dist)

    def scan(self, search_path=None):
        """Scan `search_path` for distributions usable in this environment

        Any distributions found are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items.  If not
        supplied, ``sys.path`` is used.  Only distributions conforming to
        the platform/python version defined at initialization are added.
        """
        if search_path is None:
            search_path = sys.path

        for item in search_path:
            for dist in find_distributions(item):
                self.add(dist)

    def __getitem__(self, project_name):
        """Return a newest-to-oldest list of distributions for `project_name`

        Uses case-insensitive `project_name` comparison, assuming all the
        project's distributions use their project's name converted to all
        lowercase as their key.

        """
        distribution_key = project_name.lower()
        return self._distmap.get(distribution_key, [])

    def add(self, dist):
        """Add `dist` if we ``can_add()`` it and it has not already been added
        """
        if self.can_add(dist) and dist.has_version():
            dists = self._distmap.setdefault(dist.key, [])
            if dist not in dists:
                dists.append(dist)
                # Keep each key's list sorted newest-first.
                dists.sort(key=operator.attrgetter('hashcmp'), reverse=True)

    def best_match(
            self, req, working_set, installer=None, replace_conflicting=False):
        """Find distribution best matching `req` and usable on `working_set`

        This calls the ``find(req)`` method of the `working_set` to see if a
        suitable distribution is already active.  (This may raise
        ``VersionConflict`` if an unsuitable version of the project is already
        active in the specified `working_set`.)  If a suitable distribution
        isn't active, this method returns the newest distribution in the
        environment that meets the ``Requirement`` in `req`.  If no suitable
        distribution is found, and `installer` is supplied, then the result of
        calling the environment's ``obtain(req, installer)`` method will be
        returned.
        """
        try:
            dist = working_set.find(req)
        except VersionConflict:
            if not replace_conflicting:
                raise
            dist = None
        if dist is not None:
            return dist
        # self[req.key] is newest-first, so the first match is the best.
        for dist in self[req.key]:
            if dist in req:
                return dist
        # try to download/install
        return self.obtain(req, installer)

    def obtain(self, requirement, installer=None):
        """Obtain a distribution matching `requirement` (e.g. via download)

        Obtain a distro that matches requirement (e.g. via download).  In the
        base ``Environment`` class, this routine just returns
        ``installer(requirement)``, unless `installer` is None, in which case
        None is returned instead.
This method is a hook that allows subclasses 1074 to attempt other ways of obtaining a distribution before falling back 1075 to the `installer` argument.""" 1076 if installer is not None: 1077 return installer(requirement) 1078 1079 def __iter__(self): 1080 """Yield the unique project names of the available distributions""" 1081 for key in self._distmap.keys(): 1082 if self[key]: 1083 yield key 1084 1085 def __iadd__(self, other): 1086 """In-place addition of a distribution or environment""" 1087 if isinstance(other, Distribution): 1088 self.add(other) 1089 elif isinstance(other, Environment): 1090 for project in other: 1091 for dist in other[project]: 1092 self.add(dist) 1093 else: 1094 raise TypeError("Can't add %r to environment" % (other,)) 1095 return self 1096 1097 def __add__(self, other): 1098 """Add an environment or distribution to an environment""" 1099 new = self.__class__([], platform=None, python=None) 1100 for env in self, other: 1101 new += env 1102 return new 1103 1104 1105# XXX backward compatibility 1106AvailableDistributions = Environment 1107 1108 1109class ExtractionError(RuntimeError): 1110 """An error occurred extracting a resource 1111 1112 The following attributes are available from instances of this exception: 1113 1114 manager 1115 The resource manager that raised this exception 1116 1117 cache_path 1118 The base directory for resource extraction 1119 1120 original_error 1121 The exception instance that caused extraction to fail 1122 """ 1123 1124 1125class ResourceManager: 1126 """Manage resource extraction and packages""" 1127 extraction_path = None 1128 1129 def __init__(self): 1130 self.cached_files = {} 1131 1132 def resource_exists(self, package_or_requirement, resource_name): 1133 """Does the named resource exist?""" 1134 return get_provider(package_or_requirement).has_resource(resource_name) 1135 1136 def resource_isdir(self, package_or_requirement, resource_name): 1137 """Is the named resource an existing directory?""" 1138 
return get_provider(package_or_requirement).resource_isdir( 1139 resource_name 1140 ) 1141 1142 def resource_filename(self, package_or_requirement, resource_name): 1143 """Return a true filesystem path for specified resource""" 1144 return get_provider(package_or_requirement).get_resource_filename( 1145 self, resource_name 1146 ) 1147 1148 def resource_stream(self, package_or_requirement, resource_name): 1149 """Return a readable file-like object for specified resource""" 1150 return get_provider(package_or_requirement).get_resource_stream( 1151 self, resource_name 1152 ) 1153 1154 def resource_string(self, package_or_requirement, resource_name): 1155 """Return specified resource as a string""" 1156 return get_provider(package_or_requirement).get_resource_string( 1157 self, resource_name 1158 ) 1159 1160 def resource_listdir(self, package_or_requirement, resource_name): 1161 """List the contents of the named resource directory""" 1162 return get_provider(package_or_requirement).resource_listdir( 1163 resource_name 1164 ) 1165 1166 def extraction_error(self): 1167 """Give an error message for problems extracting file(s)""" 1168 1169 old_exc = sys.exc_info()[1] 1170 cache_path = self.extraction_path or get_default_cache() 1171 1172 tmpl = textwrap.dedent(""" 1173 Can't extract file(s) to egg cache 1174 1175 The following error occurred while trying to extract file(s) 1176 to the Python egg cache: 1177 1178 {old_exc} 1179 1180 The Python egg cache directory is currently set to: 1181 1182 {cache_path} 1183 1184 Perhaps your account does not have write access to this directory? 1185 You can change the cache directory by setting the PYTHON_EGG_CACHE 1186 environment variable to point to an accessible directory. 
1187 """).lstrip() 1188 err = ExtractionError(tmpl.format(**locals())) 1189 err.manager = self 1190 err.cache_path = cache_path 1191 err.original_error = old_exc 1192 raise err 1193 1194 def get_cache_path(self, archive_name, names=()): 1195 """Return absolute location in cache for `archive_name` and `names` 1196 1197 The parent directory of the resulting path will be created if it does 1198 not already exist. `archive_name` should be the base filename of the 1199 enclosing egg (which may not be the name of the enclosing zipfile!), 1200 including its ".egg" extension. `names`, if provided, should be a 1201 sequence of path name parts "under" the egg's extraction location. 1202 1203 This method should only be called by resource providers that need to 1204 obtain an extraction location, and only for names they intend to 1205 extract, as it tracks the generated names for possible cleanup later. 1206 """ 1207 extract_path = self.extraction_path or get_default_cache() 1208 target_path = os.path.join(extract_path, archive_name + '-tmp', *names) 1209 try: 1210 _bypass_ensure_directory(target_path) 1211 except Exception: 1212 self.extraction_error() 1213 1214 self._warn_unsafe_extraction_path(extract_path) 1215 1216 self.cached_files[target_path] = 1 1217 return target_path 1218 1219 @staticmethod 1220 def _warn_unsafe_extraction_path(path): 1221 """ 1222 If the default extraction path is overridden and set to an insecure 1223 location, such as /tmp, it opens up an opportunity for an attacker to 1224 replace an extracted file with an unauthorized payload. Warn the user 1225 if a known insecure location is used. 1226 1227 See Distribute #375 for more details. 1228 """ 1229 if os.name == 'nt' and not path.startswith(os.environ['windir']): 1230 # On Windows, permissions are generally restrictive by default 1231 # and temp directories are not writable by other users, so 1232 # bypass the warning. 
1233 return 1234 mode = os.stat(path).st_mode 1235 if mode & stat.S_IWOTH or mode & stat.S_IWGRP: 1236 msg = ( 1237 "%s is writable by group/others and vulnerable to attack " 1238 "when " 1239 "used with get_resource_filename. Consider a more secure " 1240 "location (set with .set_extraction_path or the " 1241 "PYTHON_EGG_CACHE environment variable)." % path 1242 ) 1243 warnings.warn(msg, UserWarning) 1244 1245 def postprocess(self, tempname, filename): 1246 """Perform any platform-specific postprocessing of `tempname` 1247 1248 This is where Mac header rewrites should be done; other platforms don't 1249 have anything special they should do. 1250 1251 Resource providers should call this method ONLY after successfully 1252 extracting a compressed resource. They must NOT call it on resources 1253 that are already in the filesystem. 1254 1255 `tempname` is the current (temporary) name of the file, and `filename` 1256 is the name it will be renamed to by the caller after this routine 1257 returns. 1258 """ 1259 1260 if os.name == 'posix': 1261 # Make the resource executable 1262 mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777 1263 os.chmod(tempname, mode) 1264 1265 def set_extraction_path(self, path): 1266 """Set the base path where resources will be extracted to, if needed. 1267 1268 If you do not call this routine before any extractions take place, the 1269 path defaults to the return value of ``get_default_cache()``. (Which 1270 is based on the ``PYTHON_EGG_CACHE`` environment variable, with various 1271 platform-specific fallbacks. See that routine's documentation for more 1272 details.) 1273 1274 Resources are extracted to subdirectories of this path based upon 1275 information given by the ``IResourceProvider``. You may set this to a 1276 temporary directory, but then you must call ``cleanup_resources()`` to 1277 delete the extracted files when done. There is no guarantee that 1278 ``cleanup_resources()`` will be able to remove all extracted files. 
1279 1280 (Note: you may not change the extraction path for a given resource 1281 manager once resources have been extracted, unless you first call 1282 ``cleanup_resources()``.) 1283 """ 1284 if self.cached_files: 1285 raise ValueError( 1286 "Can't change extraction path, files already extracted" 1287 ) 1288 1289 self.extraction_path = path 1290 1291 def cleanup_resources(self, force=False): 1292 """ 1293 Delete all extracted resource files and directories, returning a list 1294 of the file and directory names that could not be successfully removed. 1295 This function does not have any concurrency protection, so it should 1296 generally only be called when the extraction path is a temporary 1297 directory exclusive to a single process. This method is not 1298 automatically called; you must call it explicitly or register it as an 1299 ``atexit`` function if you wish to ensure cleanup of a temporary 1300 directory used for extractions. 1301 """ 1302 # XXX 1303 1304 1305def get_default_cache(): 1306 """ 1307 Return the ``PYTHON_EGG_CACHE`` environment variable 1308 or a platform-relevant user cache dir for an app 1309 named "Python-Eggs". 1310 """ 1311 return ( 1312 os.environ.get('PYTHON_EGG_CACHE') 1313 or appdirs.user_cache_dir(appname='Python-Eggs') 1314 ) 1315 1316 1317def safe_name(name): 1318 """Convert an arbitrary string to a standard distribution name 1319 1320 Any runs of non-alphanumeric/. characters are replaced with a single '-'. 
1321 """ 1322 return re.sub('[^A-Za-z0-9.]+', '-', name) 1323 1324 1325def safe_version(version): 1326 """ 1327 Convert an arbitrary string to a standard version string 1328 """ 1329 try: 1330 # normalize the version 1331 return str(packaging.version.Version(version)) 1332 except packaging.version.InvalidVersion: 1333 version = version.replace(' ', '.') 1334 return re.sub('[^A-Za-z0-9.]+', '-', version) 1335 1336 1337def safe_extra(extra): 1338 """Convert an arbitrary string to a standard 'extra' name 1339 1340 Any runs of non-alphanumeric characters are replaced with a single '_', 1341 and the result is always lowercased. 1342 """ 1343 return re.sub('[^A-Za-z0-9.-]+', '_', extra).lower() 1344 1345 1346def to_filename(name): 1347 """Convert a project or version name to its filename-escaped form 1348 1349 Any '-' characters are currently replaced with '_'. 1350 """ 1351 return name.replace('-', '_') 1352 1353 1354def invalid_marker(text): 1355 """ 1356 Validate text as a PEP 508 environment marker; return an exception 1357 if invalid or False otherwise. 1358 """ 1359 try: 1360 evaluate_marker(text) 1361 except SyntaxError as e: 1362 e.filename = None 1363 e.lineno = None 1364 return e 1365 return False 1366 1367 1368def evaluate_marker(text, extra=None): 1369 """ 1370 Evaluate a PEP 508 environment marker. 1371 Return a boolean indicating the marker result in this environment. 1372 Raise SyntaxError if marker is invalid. 1373 1374 This implementation uses the 'pyparsing' module. 
class NullProvider:
    """Try to implement resources and metadata for arbitrary PEP 302 loaders"""

    egg_name = None
    egg_info = None
    loader = None

    def __init__(self, module):
        self.loader = getattr(module, '__loader__', None)
        self.module_path = os.path.dirname(getattr(module, '__file__', ''))

    def get_resource_filename(self, manager, resource_name):
        return self._fn(self.module_path, resource_name)

    def get_resource_stream(self, manager, resource_name):
        return io.BytesIO(self.get_resource_string(manager, resource_name))

    def get_resource_string(self, manager, resource_name):
        return self._get(self._fn(self.module_path, resource_name))

    def has_resource(self, resource_name):
        return self._has(self._fn(self.module_path, resource_name))

    def _get_metadata_path(self, name):
        return self._fn(self.egg_info, name)

    def has_metadata(self, name):
        if not self.egg_info:
            return self.egg_info

        path = self._get_metadata_path(name)
        return self._has(path)

    def get_metadata(self, name):
        if not self.egg_info:
            return ""
        value = self._get(self._fn(self.egg_info, name))
        return value.decode('utf-8') if six.PY3 else value

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))

    def resource_isdir(self, resource_name):
        return self._isdir(self._fn(self.module_path, resource_name))

    def metadata_isdir(self, name):
        return self.egg_info and self._isdir(self._fn(self.egg_info, name))

    def resource_listdir(self, resource_name):
        return self._listdir(self._fn(self.module_path, resource_name))

    def metadata_listdir(self, name):
        if self.egg_info:
            return self._listdir(self._fn(self.egg_info, name))
        return []

    def run_script(self, script_name, namespace):
        script = 'scripts/' + script_name
        if not self.has_metadata(script):
            raise ResolutionError(
                "Script {script!r} not found in metadata at {self.egg_info!r}"
                .format(**locals()),
            )
        script_text = self.get_metadata(script).replace('\r\n', '\n')
        script_text = script_text.replace('\r', '\n')
        script_filename = self._fn(self.egg_info, script)
        namespace['__file__'] = script_filename
        if os.path.exists(script_filename):
            source = open(script_filename).read()
            code = compile(source, script_filename, 'exec')
            exec(code, namespace, namespace)
        else:
            # script only exists inside the loader; register its text with
            # linecache so tracebacks can show the source
            from linecache import cache
            cache[script_filename] = (
                len(script_text), 0, script_text.split('\n'), script_filename
            )
            script_code = compile(script_text, script_filename, 'exec')
            exec(script_code, namespace, namespace)

    def _has(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _isdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _listdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _fn(self, base, resource_name):
        self._validate_resource_path(resource_name)
        if resource_name:
            return os.path.join(base, *resource_name.split('/'))
        return base

    @staticmethod
    def _validate_resource_path(path):
        """
        Validate the resource paths according to the docs.
        https://setuptools.readthedocs.io/en/latest/pkg_resources.html#basic-resource-access

        >>> warned = getfixture('recwarn')
        >>> warnings.simplefilter('always')
        >>> vrp = NullProvider._validate_resource_path
        >>> vrp('foo/bar.txt')
        >>> bool(warned)
        False
        >>> vrp('../foo/bar.txt')
        >>> bool(warned)
        True
        >>> warned.clear()
        >>> vrp('/foo/bar.txt')
        >>> bool(warned)
        True
        >>> vrp('foo/../../bar.txt')
        >>> bool(warned)
        True
        >>> warned.clear()
        >>> vrp('foo/f../bar.txt')
        >>> bool(warned)
        False

        Windows path separators are straight-up disallowed.
        >>> vrp(r'\\foo/bar.txt')
        Traceback (most recent call last):
        ...
        ValueError: Use of .. or absolute path in a resource path \
is not allowed.

        >>> vrp(r'C:\\foo/bar.txt')
        Traceback (most recent call last):
        ...
        ValueError: Use of .. or absolute path in a resource path \
is not allowed.

        Blank values are allowed

        >>> vrp('')
        >>> bool(warned)
        False

        Non-string values are not.

        >>> vrp(None)
        Traceback (most recent call last):
        ...
        AttributeError: ...
        """
        invalid = (
            os.path.pardir in path.split(posixpath.sep) or
            posixpath.isabs(path) or
            ntpath.isabs(path)
        )
        if not invalid:
            return

        msg = "Use of .. or absolute path in a resource path is not allowed."

        # Aggressively disallow Windows absolute paths
        if ntpath.isabs(path) and not posixpath.isabs(path):
            raise ValueError(msg)

        # for compatibility, warn; in future
        # raise ValueError(msg)
        warnings.warn(
            msg[:-1] + " and will raise exceptions in a future release.",
            DeprecationWarning,
            stacklevel=4,
        )

    def _get(self, path):
        if hasattr(self.loader, 'get_data'):
            return self.loader.get_data(path)
        raise NotImplementedError(
            "Can't perform this operation for loaders without 'get_data()'"
        )


register_loader_type(object, NullProvider)


class EggProvider(NullProvider):
    """Provider based on a virtual filesystem"""

    def __init__(self, module):
        NullProvider.__init__(self, module)
        self._setup_prefix()

    def _setup_prefix(self):
        # we assume here that our metadata may be nested inside a "basket"
        # of multiple eggs; that's why we use module_path instead of .archive
        path = self.module_path
        old = None
        while path != old:
            if _is_egg_path(path):
                self.egg_name = os.path.basename(path)
                self.egg_info = os.path.join(path, 'EGG-INFO')
                self.egg_root = path
                break
            old = path
            path, base = os.path.split(path)


class DefaultProvider(EggProvider):
    """Provides access to package resources in the filesystem"""

    def _has(self, path):
        return os.path.exists(path)

    def _isdir(self, path):
        return os.path.isdir(path)

    def _listdir(self, path):
        return os.listdir(path)

    def get_resource_stream(self, manager, resource_name):
        return open(self._fn(self.module_path, resource_name), 'rb')

    def _get(self, path):
        with open(path, 'rb') as stream:
            return stream.read()

    @classmethod
    def _register(cls):
        loader_names = 'SourceFileLoader', 'SourcelessFileLoader',
        for name in loader_names:
            # loaders may be absent under older/unusual interpreters
            loader_cls = getattr(importlib_machinery, name, type(None))
            register_loader_type(loader_cls, cls)


DefaultProvider._register()
class EmptyProvider(NullProvider):
    """Provider that returns nothing for all requests"""

    module_path = None

    _isdir = _has = lambda self, path: False

    def _get(self, path):
        return ''

    def _listdir(self, path):
        return []

    def __init__(self):
        pass


empty_provider = EmptyProvider()


class ZipManifests(dict):
    """
    zip manifest builder
    """

    @classmethod
    def build(cls, path):
        """
        Build a dictionary similar to the zipimport directory
        caches, except instead of tuples, store ZipInfo objects.

        Use a platform-specific path separator (os.sep) for the path keys
        for compatibility with pypy on Windows.
        """
        with zipfile.ZipFile(path) as zfile:
            items = (
                (
                    name.replace('/', os.sep),
                    zfile.getinfo(name),
                )
                for name in zfile.namelist()
            )
            return dict(items)

    load = build


class MemoizedZipManifests(ZipManifests):
    """
    Memoized zipfile manifests.
    """
    manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime')

    def load(self, path):
        """
        Load a manifest at path or return a suitable manifest already loaded.
        """
        path = os.path.normpath(path)
        mtime = os.stat(path).st_mtime

        # rebuild only if never seen or the zip file changed on disk
        if path not in self or self[path].mtime != mtime:
            manifest = self.build(path)
            self[path] = self.manifest_mod(manifest, mtime)

        return self[path].manifest


class ZipProvider(EggProvider):
    """Resource support for zips and eggs"""

    eagers = None
    _zip_manifests = MemoizedZipManifests()

    def __init__(self, module):
        EggProvider.__init__(self, module)
        self.zip_pre = self.loader.archive + os.sep

    def _zipinfo_name(self, fspath):
        # Convert a virtual filename (full path to file) into a zipfile subpath
        # usable with the zipimport directory cache for our target archive
        fspath = fspath.rstrip(os.sep)
        if fspath == self.loader.archive:
            return ''
        if fspath.startswith(self.zip_pre):
            return fspath[len(self.zip_pre):]
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath, self.zip_pre)
        )

    def _parts(self, zip_path):
        # Convert a zipfile subpath into an egg-relative path part list.
        # pseudo-fs path
        fspath = self.zip_pre + zip_path
        if fspath.startswith(self.egg_root + os.sep):
            return fspath[len(self.egg_root) + 1:].split(os.sep)
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath, self.egg_root)
        )
1712 # pseudo-fs path 1713 fspath = self.zip_pre + zip_path 1714 if fspath.startswith(self.egg_root + os.sep): 1715 return fspath[len(self.egg_root) + 1:].split(os.sep) 1716 raise AssertionError( 1717 "%s is not a subpath of %s" % (fspath, self.egg_root) 1718 ) 1719 1720 @property 1721 def zipinfo(self): 1722 return self._zip_manifests.load(self.loader.archive) 1723 1724 def get_resource_filename(self, manager, resource_name): 1725 if not self.egg_name: 1726 raise NotImplementedError( 1727 "resource_filename() only supported for .egg, not .zip" 1728 ) 1729 # no need to lock for extraction, since we use temp names 1730 zip_path = self._resource_to_zip(resource_name) 1731 eagers = self._get_eager_resources() 1732 if '/'.join(self._parts(zip_path)) in eagers: 1733 for name in eagers: 1734 self._extract_resource(manager, self._eager_to_zip(name)) 1735 return self._extract_resource(manager, zip_path) 1736 1737 @staticmethod 1738 def _get_date_and_size(zip_stat): 1739 size = zip_stat.file_size 1740 # ymdhms+wday, yday, dst 1741 date_time = zip_stat.date_time + (0, 0, -1) 1742 # 1980 offset already done 1743 timestamp = time.mktime(date_time) 1744 return timestamp, size 1745 1746 def _extract_resource(self, manager, zip_path): 1747 1748 if zip_path in self._index(): 1749 for name in self._index()[zip_path]: 1750 last = self._extract_resource( 1751 manager, os.path.join(zip_path, name) 1752 ) 1753 # return the extracted directory name 1754 return os.path.dirname(last) 1755 1756 timestamp, size = self._get_date_and_size(self.zipinfo[zip_path]) 1757 1758 if not WRITE_SUPPORT: 1759 raise IOError('"os.rename" and "os.unlink" are not supported ' 1760 'on this platform') 1761 try: 1762 1763 real_path = manager.get_cache_path( 1764 self.egg_name, self._parts(zip_path) 1765 ) 1766 1767 if self._is_current(real_path, zip_path): 1768 return real_path 1769 1770 outf, tmpnam = _mkstemp( 1771 ".$extract", 1772 dir=os.path.dirname(real_path), 1773 ) 1774 os.write(outf, 
self.loader.get_data(zip_path)) 1775 os.close(outf) 1776 utime(tmpnam, (timestamp, timestamp)) 1777 manager.postprocess(tmpnam, real_path) 1778 1779 try: 1780 rename(tmpnam, real_path) 1781 1782 except os.error: 1783 if os.path.isfile(real_path): 1784 if self._is_current(real_path, zip_path): 1785 # the file became current since it was checked above, 1786 # so proceed. 1787 return real_path 1788 # Windows, del old file and retry 1789 elif os.name == 'nt': 1790 unlink(real_path) 1791 rename(tmpnam, real_path) 1792 return real_path 1793 raise 1794 1795 except os.error: 1796 # report a user-friendly error 1797 manager.extraction_error() 1798 1799 return real_path 1800 1801 def _is_current(self, file_path, zip_path): 1802 """ 1803 Return True if the file_path is current for this zip_path 1804 """ 1805 timestamp, size = self._get_date_and_size(self.zipinfo[zip_path]) 1806 if not os.path.isfile(file_path): 1807 return False 1808 stat = os.stat(file_path) 1809 if stat.st_size != size or stat.st_mtime != timestamp: 1810 return False 1811 # check that the contents match 1812 zip_contents = self.loader.get_data(zip_path) 1813 with open(file_path, 'rb') as f: 1814 file_contents = f.read() 1815 return zip_contents == file_contents 1816 1817 def _get_eager_resources(self): 1818 if self.eagers is None: 1819 eagers = [] 1820 for name in ('native_libs.txt', 'eager_resources.txt'): 1821 if self.has_metadata(name): 1822 eagers.extend(self.get_metadata_lines(name)) 1823 self.eagers = eagers 1824 return self.eagers 1825 1826 def _index(self): 1827 try: 1828 return self._dirindex 1829 except AttributeError: 1830 ind = {} 1831 for path in self.zipinfo: 1832 parts = path.split(os.sep) 1833 while parts: 1834 parent = os.sep.join(parts[:-1]) 1835 if parent in ind: 1836 ind[parent].append(parts[-1]) 1837 break 1838 else: 1839 ind[parent] = [parts.pop()] 1840 self._dirindex = ind 1841 return ind 1842 1843 def _has(self, fspath): 1844 zip_path = self._zipinfo_name(fspath) 1845 return zip_path 
in self.zipinfo or zip_path in self._index() 1846 1847 def _isdir(self, fspath): 1848 return self._zipinfo_name(fspath) in self._index() 1849 1850 def _listdir(self, fspath): 1851 return list(self._index().get(self._zipinfo_name(fspath), ())) 1852 1853 def _eager_to_zip(self, resource_name): 1854 return self._zipinfo_name(self._fn(self.egg_root, resource_name)) 1855 1856 def _resource_to_zip(self, resource_name): 1857 return self._zipinfo_name(self._fn(self.module_path, resource_name)) 1858 1859 1860register_loader_type(zipimport.zipimporter, ZipProvider) 1861 1862 1863class FileMetadata(EmptyProvider): 1864 """Metadata handler for standalone PKG-INFO files 1865 1866 Usage:: 1867 1868 metadata = FileMetadata("/path/to/PKG-INFO") 1869 1870 This provider rejects all data and metadata requests except for PKG-INFO, 1871 which is treated as existing, and will be the contents of the file at 1872 the provided location. 1873 """ 1874 1875 def __init__(self, path): 1876 self.path = path 1877 1878 def _get_metadata_path(self, name): 1879 return self.path 1880 1881 def has_metadata(self, name): 1882 return name == 'PKG-INFO' and os.path.isfile(self.path) 1883 1884 def get_metadata(self, name): 1885 if name != 'PKG-INFO': 1886 raise KeyError("No metadata except PKG-INFO is available") 1887 1888 with io.open(self.path, encoding='utf-8', errors="replace") as f: 1889 metadata = f.read() 1890 self._warn_on_replacement(metadata) 1891 return metadata 1892 1893 def _warn_on_replacement(self, metadata): 1894 # Python 2.7 compat for: replacement_char = '�' 1895 replacement_char = b'\xef\xbf\xbd'.decode('utf-8') 1896 if replacement_char in metadata: 1897 tmpl = "{self.path} could not be properly decoded in UTF-8" 1898 msg = tmpl.format(**locals()) 1899 warnings.warn(msg) 1900 1901 def get_metadata_lines(self, name): 1902 return yield_lines(self.get_metadata(name)) 1903 1904 1905class PathMetadata(DefaultProvider): 1906 """Metadata provider for egg directories 1907 1908 Usage:: 1909 1910 
# Development eggs: 1911 1912 egg_info = "/path/to/PackageName.egg-info" 1913 base_dir = os.path.dirname(egg_info) 1914 metadata = PathMetadata(base_dir, egg_info) 1915 dist_name = os.path.splitext(os.path.basename(egg_info))[0] 1916 dist = Distribution(basedir, project_name=dist_name, metadata=metadata) 1917 1918 # Unpacked egg directories: 1919 1920 egg_path = "/path/to/PackageName-ver-pyver-etc.egg" 1921 metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO')) 1922 dist = Distribution.from_filename(egg_path, metadata=metadata) 1923 """ 1924 1925 def __init__(self, path, egg_info): 1926 self.module_path = path 1927 self.egg_info = egg_info 1928 1929 1930class EggMetadata(ZipProvider): 1931 """Metadata provider for .egg files""" 1932 1933 def __init__(self, importer): 1934 """Create a metadata provider from a zipimporter""" 1935 1936 self.zip_pre = importer.archive + os.sep 1937 self.loader = importer 1938 if importer.prefix: 1939 self.module_path = os.path.join(importer.archive, importer.prefix) 1940 else: 1941 self.module_path = importer.archive 1942 self._setup_prefix() 1943 1944 1945_declare_state('dict', _distribution_finders={}) 1946 1947 1948def register_finder(importer_type, distribution_finder): 1949 """Register `distribution_finder` to find distributions in sys.path items 1950 1951 `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item 1952 handler), and `distribution_finder` is a callable that, passed a path 1953 item and the importer instance, yields ``Distribution`` instances found on 1954 that path item. 
See ``pkg_resources.find_on_path`` for an example.""" 1955 _distribution_finders[importer_type] = distribution_finder 1956 1957 1958def find_distributions(path_item, only=False): 1959 """Yield distributions accessible via `path_item`""" 1960 importer = get_importer(path_item) 1961 finder = _find_adapter(_distribution_finders, importer) 1962 return finder(importer, path_item, only) 1963 1964 1965def find_eggs_in_zip(importer, path_item, only=False): 1966 """ 1967 Find eggs in zip files; possibly multiple nested eggs. 1968 """ 1969 if importer.archive.endswith('.whl'): 1970 # wheels are not supported with this finder 1971 # they don't have PKG-INFO metadata, and won't ever contain eggs 1972 return 1973 metadata = EggMetadata(importer) 1974 if metadata.has_metadata('PKG-INFO'): 1975 yield Distribution.from_filename(path_item, metadata=metadata) 1976 if only: 1977 # don't yield nested distros 1978 return 1979 for subitem in metadata.resource_listdir(''): 1980 if _is_egg_path(subitem): 1981 subpath = os.path.join(path_item, subitem) 1982 dists = find_eggs_in_zip(zipimport.zipimporter(subpath), subpath) 1983 for dist in dists: 1984 yield dist 1985 elif subitem.lower().endswith('.dist-info'): 1986 subpath = os.path.join(path_item, subitem) 1987 submeta = EggMetadata(zipimport.zipimporter(subpath)) 1988 submeta.egg_info = subpath 1989 yield Distribution.from_location(path_item, subitem, submeta) 1990 1991 1992register_finder(zipimport.zipimporter, find_eggs_in_zip) 1993 1994 1995def find_nothing(importer, path_item, only=False): 1996 return () 1997 1998 1999register_finder(object, find_nothing) 2000 2001 2002def _by_version_descending(names): 2003 """ 2004 Given a list of filenames, return them in descending order 2005 by version number. 
def find_on_path(importer, path_item, only=False):
    """Yield distributions accessible on a sys.path directory"""
    path_item = _normalize_cached(path_item)

    if _is_unpacked_egg(path_item):
        yield Distribution.from_filename(
            path_item, metadata=PathMetadata(
                path_item, os.path.join(path_item, 'EGG-INFO')
            )
        )
        return

    entries = safe_listdir(path_item)

    # for performance, before sorting by version,
    # screen entries for only those that will yield
    # distributions
    filtered = (
        entry
        for entry in entries
        if dist_factory(path_item, entry, only)
    )

    # scan for .egg and .egg-info in directory
    path_item_entries = _by_version_descending(filtered)
    for entry in path_item_entries:
        fullpath = os.path.join(path_item, entry)
        factory = dist_factory(path_item, entry, only)
        for dist in factory(fullpath):
            yield dist


def dist_factory(path_item, entry, only):
    """
    Return a dist_factory for a path_item and entry
    """
    lower = entry.lower()
    is_meta = any(map(lower.endswith, ('.egg-info', '.dist-info')))
    return (
        distributions_from_metadata
        if is_meta else
        find_distributions
        if not only and _is_egg_path(entry) else
        resolve_egg_link
        if not only and lower.endswith('.egg-link') else
        NoDists()
    )


class NoDists:
    """
    >>> bool(NoDists())
    False

    >>> list(NoDists()('anything'))
    []
    """
    def __bool__(self):
        return False
    if six.PY2:
        __nonzero__ = __bool__

    def __call__(self, fullpath):
        return iter(())


def safe_listdir(path):
    """
    Attempt to list contents of path, but suppress some exceptions.
    """
    try:
        return os.listdir(path)
    except (PermissionError, NotADirectoryError):
        pass
    except OSError as e:
        # Ignore the directory if does not exist, not a directory or
        # permission denied
        ignorable = (
            e.errno in (errno.ENOTDIR, errno.EACCES, errno.ENOENT)
            # Python 2 on Windows needs to be handled this way :(
            or getattr(e, "winerror", None) == 267
        )
        if not ignorable:
            raise
    return ()


def distributions_from_metadata(path):
    root = os.path.dirname(path)
    if os.path.isdir(path):
        if len(os.listdir(path)) == 0:
            # empty metadata dir; skip
            return
        metadata = PathMetadata(root, path)
    else:
        metadata = FileMetadata(path)
    entry = os.path.basename(path)
    yield Distribution.from_location(
        root, entry, metadata, precedence=DEVELOP_DIST,
    )


def non_empty_lines(path):
    """
    Yield non-empty lines from file at path
    """
    with open(path) as f:
        for line in f:
            line = line.strip()
            if line:
                yield line


def resolve_egg_link(path):
    """
    Given a path to an .egg-link, resolve distributions
    present in the referenced path.
    """
    referenced_paths = non_empty_lines(path)
    resolved_paths = (
        os.path.join(os.path.dirname(path), ref)
        for ref in referenced_paths
    )
    dist_groups = map(find_distributions, resolved_paths)
    return next(dist_groups, ())
2145 """ 2146 referenced_paths = non_empty_lines(path) 2147 resolved_paths = ( 2148 os.path.join(os.path.dirname(path), ref) 2149 for ref in referenced_paths 2150 ) 2151 dist_groups = map(find_distributions, resolved_paths) 2152 return next(dist_groups, ()) 2153 2154 2155register_finder(pkgutil.ImpImporter, find_on_path) 2156 2157if hasattr(importlib_machinery, 'FileFinder'): 2158 register_finder(importlib_machinery.FileFinder, find_on_path) 2159 2160_declare_state('dict', _namespace_handlers={}) 2161_declare_state('dict', _namespace_packages={}) 2162 2163 2164def register_namespace_handler(importer_type, namespace_handler): 2165 """Register `namespace_handler` to declare namespace packages 2166 2167 `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item 2168 handler), and `namespace_handler` is a callable like this:: 2169 2170 def namespace_handler(importer, path_entry, moduleName, module): 2171 # return a path_entry to use for child packages 2172 2173 Namespace handlers are only called if the importer object has already 2174 agreed that it can handle the relevant path item, and they should only 2175 return a subpath if the module __path__ does not already contain an 2176 equivalent subpath. For an example namespace handler, see 2177 ``pkg_resources.file_ns_handler``. 
    """
    _namespace_handlers[importer_type] = namespace_handler


def _handle_ns(packageName, path_item):
    """Ensure that named package includes a subpath of path_item (if needed)

    Returns the subpath that was added to the package's __path__, or
    None if the importer cannot handle the item or no subpath applies.
    """

    importer = get_importer(path_item)
    if importer is None:
        return None

    # capture warnings due to #1111
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        loader = importer.find_module(packageName)

    if loader is None:
        return None
    module = sys.modules.get(packageName)
    if module is None:
        # create a fresh, empty namespace module and hook it up to
        # its parent package (if any)
        module = sys.modules[packageName] = types.ModuleType(packageName)
        module.__path__ = []
        _set_parent_ns(packageName)
    elif not hasattr(module, '__path__'):
        raise TypeError("Not a package:", packageName)
    # dispatch to the handler registered for this importer's type
    handler = _find_adapter(_namespace_handlers, importer)
    subpath = handler(importer, path_item, packageName, module)
    if subpath is not None:
        path = module.__path__
        path.append(subpath)
        loader.load_module(packageName)
        _rebuild_mod_path(path, packageName, module)
    return subpath


def _rebuild_mod_path(orig_path, package_name, module):
    """
    Rebuild module.__path__ ensuring that all entries are ordered
    corresponding to their sys.path order
    """
    sys_path = [_normalize_cached(p) for p in sys.path]

    def safe_sys_path_index(entry):
        """
        Workaround for #520 and #513.

        Entries not on sys.path sort after all entries that are.
        """
        try:
            return sys_path.index(entry)
        except ValueError:
            return float('inf')

    def position_in_sys_path(path):
        """
        Return the ordinal of the path based on its position in sys.path
        """
        # strip the trailing package components to recover the sys.path
        # entry this __path__ item came from
        path_parts = path.split(os.sep)
        module_parts = package_name.count('.') + 1
        parts = path_parts[:-module_parts]
        return safe_sys_path_index(_normalize_cached(os.sep.join(parts)))

    new_path = sorted(orig_path, key=position_in_sys_path)
    new_path = [_normalize_cached(p) for p in new_path]

    if isinstance(module.__path__, list):
        # mutate in place so aliases of __path__ see the update
        module.__path__[:] = new_path
    else:
        module.__path__ = new_path


def declare_namespace(packageName):
    """Declare that package 'packageName' is a namespace package"""

    _imp.acquire_lock()
    try:
        if packageName in _namespace_packages:
            return

        path = sys.path
        parent, _, _ = packageName.rpartition('.')

        if parent:
            # recursively declare (and if needed, import) the parent first
            declare_namespace(parent)
            if parent not in _namespace_packages:
                __import__(parent)
            try:
                path = sys.modules[parent].__path__
            except AttributeError:
                raise TypeError("Not a package:", parent)

        # Track what packages are namespaces, so when new path items are added,
        # they can be updated
        _namespace_packages.setdefault(parent or None, []).append(packageName)
        _namespace_packages.setdefault(packageName, [])

        for path_item in path:
            # Ensure all the parent's path items are reflected in the child,
            # if they apply
            _handle_ns(packageName, path_item)

    finally:
        _imp.release_lock()


def fixup_namespace_packages(path_item, parent=None):
    """Ensure that previously-declared namespace packages include path_item"""
    _imp.acquire_lock()
    try:
        for package in _namespace_packages.get(parent, ()):
            subpath = _handle_ns(package, path_item)
            if subpath:
                # recurse into child namespace packages of this package
                fixup_namespace_packages(subpath, package)
    finally:
        _imp.release_lock()


def file_ns_handler(importer, path_item, packageName, module):
    """Compute an ns-package subpath for a filesystem or zipfile importer"""

    subpath = os.path.join(path_item, packageName.split('.')[-1])
    normalized = _normalize_cached(subpath)
    for item in module.__path__:
        if _normalize_cached(item) == normalized:
            break
    else:
        # Only return the path if it's not already there
        return subpath


register_namespace_handler(pkgutil.ImpImporter, file_ns_handler)
register_namespace_handler(zipimport.zipimporter, file_ns_handler)

if hasattr(importlib_machinery, 'FileFinder'):
    register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler)


def null_ns_handler(importer, path_item, packageName, module):
    # registered for ``object`` below, so it acts as the catch-all
    # fallback when _find_adapter walks an importer's MRO
    return None


register_namespace_handler(object, null_ns_handler)


def normalize_path(filename):
    """Normalize a file/dir name for comparison purposes"""
    return os.path.normcase(os.path.realpath(os.path.normpath(_cygwin_patch(filename))))


def _cygwin_patch(filename):  # pragma: nocover
    """
    Contrary to POSIX 2008, on Cygwin, getcwd (3) contains
    symlink components. Using
    os.path.abspath() works around this limitation. A fix in os.getcwd()
    would probably better, in Cygwin even more so, except
    that this seems to be by design...
    """
    return os.path.abspath(filename) if sys.platform == 'cygwin' else filename


def _normalize_cached(filename, _cache={}):
    # NB: the shared mutable default is an intentional process-wide
    # memoization cache for normalize_path().
    try:
        return _cache[filename]
    except KeyError:
        _cache[filename] = result = normalize_path(filename)
        return result


def _is_egg_path(path):
    """
    Determine if given path appears to be an egg.
    """
    return path.lower().endswith('.egg')


def _is_unpacked_egg(path):
    """
    Determine if given path appears to be an unpacked egg.
    """
    return (
        _is_egg_path(path) and
        os.path.isfile(os.path.join(path, 'EGG-INFO', 'PKG-INFO'))
    )


def _set_parent_ns(packageName):
    """
    Bind the module registered as ``packageName`` in sys.modules as an
    attribute of its parent package, if it has one.
    """
    parts = packageName.split('.')
    name = parts.pop()
    if parts:
        parent = '.'.join(parts)
        setattr(sys.modules[parent], name, sys.modules[packageName])


def yield_lines(strs):
    """Yield non-empty/non-comment lines of a string or sequence"""
    if isinstance(strs, six.string_types):
        for s in strs.splitlines():
            s = s.strip()
            # skip blank lines/comments
            if s and not s.startswith('#'):
                yield s
    else:
        # recurse into (possibly nested) iterables of strings
        for ss in strs:
            for s in yield_lines(ss):
                yield s


# Matcher for a dotted module/group name, e.g. "a.b.c"
MODULE = re.compile(r"\w+(\.\w+)*$").match
# Parser for egg filenames: Name[-Version[-pyX.Y[-Platform]]]
EGG_NAME = re.compile(
    r"""
    (?P<name>[^-]+) (
        -(?P<ver>[^-]+) (
            -py(?P<pyver>[^-]+) (
                -(?P<plat>.+)
            )?
        )?
    )?
    """,
    re.VERBOSE | re.IGNORECASE,
).match


class EntryPoint:
    """Object representing an advertised importable object"""

    def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
        if not MODULE(module_name):
            raise ValueError("Invalid module name", module_name)
        self.name = name
        self.module_name = module_name
        self.attrs = tuple(attrs)
        self.extras = tuple(extras)
        self.dist = dist

    def __str__(self):
        # inverse of EntryPoint.parse: "name = module:attrs [extras]"
        s = "%s = %s" % (self.name, self.module_name)
        if self.attrs:
            s += ':' + '.'.join(self.attrs)
        if self.extras:
            s += ' [%s]' % ','.join(self.extras)
        return s

    def __repr__(self):
        return "EntryPoint.parse(%r)" % str(self)

    def load(self, require=True, *args, **kwargs):
        """
        Require packages for this EntryPoint, then resolve it.

        Passing ``require=False`` or extra arguments is deprecated; use
        ``.resolve()`` and ``.require()`` separately instead.
        """
        if not require or args or kwargs:
            warnings.warn(
                "Parameters to load are deprecated. Call .resolve and "
                ".require separately.",
                PkgResourcesDeprecationWarning,
                stacklevel=2,
            )
        if require:
            self.require(*args, **kwargs)
        return self.resolve()

    def resolve(self):
        """
        Resolve the entry point from its module and attrs.
        """
        module = __import__(self.module_name, fromlist=['__name__'], level=0)
        try:
            # walk the attribute chain, e.g. attrs=('a', 'b') -> module.a.b
            return functools.reduce(getattr, self.attrs, module)
        except AttributeError as exc:
            raise ImportError(str(exc))

    def require(self, env=None, installer=None):
        """
        Ensure this entry point's distribution and its requirements
        (with this entry point's extras) are on the working set.
        """
        if self.extras and not self.dist:
            raise UnknownExtra("Can't require() without a distribution", self)

        # Get the requirements for this entry point with all its extras and
        # then resolve them. We have to pass `extras` along when resolving so
        # that the working set knows what extras we want. Otherwise, for
        # dist-info distributions, the working set will assume that the
        # requirements for that extra are purely optional and skip over them.
        reqs = self.dist.requires(self.extras)
        items = working_set.resolve(reqs, env, installer, extras=self.extras)
        list(map(working_set.add, items))

    # "name = module:attr.attr [extra1,extra2]" syntax
    pattern = re.compile(
        r'\s*'
        r'(?P<name>.+?)\s*'
        r'=\s*'
        r'(?P<module>[\w.]+)\s*'
        r'(:\s*(?P<attr>[\w.]+))?\s*'
        r'(?P<extras>\[.*\])?\s*$'
    )

    @classmethod
    def parse(cls, src, dist=None):
        """Parse a single entry point from string `src`

        Entry point syntax follows the form::

            name = some.module:some.attr [extra1, extra2]

        The entry name and module name are required, but the ``:attrs`` and
        ``[extras]`` parts are optional
        """
        m = cls.pattern.match(src)
        if not m:
            msg = "EntryPoint must be in 'name=module:attrs [extras]' format"
            raise ValueError(msg, src)
        res = m.groupdict()
        extras = cls._parse_extras(res['extras'])
        attrs = res['attr'].split('.') if res['attr'] else ()
        return cls(res['name'], res['module'], attrs, extras, dist)

    @classmethod
    def _parse_extras(cls, extras_spec):
        # Reuse the requirement parser by prefixing a dummy project name;
        # reject anything that smuggles in version specifiers.
        if not extras_spec:
            return ()
        req = Requirement.parse('x' + extras_spec)
        if req.specs:
            raise ValueError()
        return req.extras

    @classmethod
    def parse_group(cls, group, lines, dist=None):
        """Parse an entry point group"""
        if not MODULE(group):
            raise ValueError("Invalid group name", group)
        this = {}
        for line in yield_lines(lines):
            ep = cls.parse(line, dist)
            if ep.name in this:
                raise ValueError("Duplicate entry point", group, ep.name)
            this[ep.name] = ep
        return this

    @classmethod
    def parse_map(cls, data, dist=None):
        """Parse a map of entry point groups"""
        if isinstance(data, dict):
            data = data.items()
        else:
            data = split_sections(data)
        maps = {}
        for group, lines in data:
            if group is None:
                # lines before any [section] header are only tolerated
                # when empty
                if not lines:
                    continue
                raise ValueError("Entry points must be listed in groups")
            group = group.strip()
            if group in maps:
                raise ValueError("Duplicate group name", group)
            maps[group] = cls.parse_group(group, lines, dist)
        return maps


def _remove_md5_fragment(location):
    """
    Strip a trailing ``#md5=...`` fragment from a URL, if present.
    """
    if not location:
        return ''
    parsed = urllib.parse.urlparse(location)
    # parsed[-1] is the fragment component of the 6-tuple
    if parsed[-1].startswith('md5='):
        return urllib.parse.urlunparse(parsed[:-1] + ('',))
    return location


def _version_from_file(lines):
    """
    Given an iterable of lines from a Metadata file, return
    the value of the Version field, if present, or None otherwise.
    """
    def is_version_line(line):
        return line.lower().startswith('version:')
    version_lines = filter(is_version_line, lines)
    line = next(iter(version_lines), '')
    _, _, value = line.partition(':')
    return safe_version(value.strip()) or None


class Distribution:
    """Wrap an actual or potential sys.path entry w/metadata"""
    # name of the metadata file carrying the Version header
    PKG_INFO = 'PKG-INFO'

    def __init__(
            self, location=None, metadata=None, project_name=None,
            version=None, py_version=PY_MAJOR, platform=None,
            precedence=EGG_DIST):
        self.project_name = safe_name(project_name or 'Unknown')
        if version is not None:
            # leave _version unset otherwise; the `version` property
            # then falls back to reading the metadata
            self._version = safe_version(version)
        self.py_version = py_version
        self.platform = platform
        self.location = location
        self.precedence = precedence
        self._provider = metadata or empty_provider

    @classmethod
    def from_location(cls, location, basename, metadata=None, **kw):
        """
        Build a distribution from a location and its entry's basename,
        selecting the Distribution subclass by the entry's extension.
        """
        project_name, version, py_version, platform = [None] * 4
        basename, ext = os.path.splitext(basename)
        if ext.lower() in _distributionImpl:
            cls = _distributionImpl[ext.lower()]

            match = EGG_NAME(basename)
            if match:
                project_name, version, py_version, platform = match.group(
                    'name', 'ver', 'pyver', 'plat'
                )
        return cls(
            location, metadata, project_name=project_name, version=version,
            py_version=py_version, platform=platform, **kw
        )._reload_version()

    def _reload_version(self):
        # hook for subclasses (see EggInfoDistribution)
        return self

    @property
    def hashcmp(self):
        # tuple used for hashing and all ordering comparisons below
        return (
            self.parsed_version,
            self.precedence,
            self.key,
            _remove_md5_fragment(self.location),
            self.py_version or '',
            self.platform or '',
        )

    def __hash__(self):
        return hash(self.hashcmp)

    def __lt__(self, other):
        return self.hashcmp < other.hashcmp

    def __le__(self, other):
        return self.hashcmp <= other.hashcmp

    def __gt__(self, other):
        return self.hashcmp > other.hashcmp

    def __ge__(self, other):
        return self.hashcmp >= other.hashcmp

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            # It's not a Distribution, so they are not equal
            return False
        return self.hashcmp == other.hashcmp

    def __ne__(self, other):
        return not self == other

    # These properties have to be lazy so that we don't have to load any
    # metadata until/unless it's actually needed. (i.e., some distributions
    # may not know their name or version without loading PKG-INFO)

    @property
    def key(self):
        try:
            return self._key
        except AttributeError:
            self._key = key = self.project_name.lower()
            return key

    @property
    def parsed_version(self):
        if not hasattr(self, "_parsed_version"):
            self._parsed_version = parse_version(self.version)

        return self._parsed_version

    def _warn_legacy_version(self):
        LV = packaging.version.LegacyVersion
        is_legacy = isinstance(self._parsed_version, LV)
        if not is_legacy:
            return

        # While an empty version is technically a legacy version and
        # is not a valid PEP 440 version, it's also unlikely to
        # actually come from someone and instead it is more likely that
        # it comes from setuptools attempting to parse a filename and
        # including it in the list. So for that we'll gate this warning
        # on if the version is anything at all or not.
        if not self.version:
            return

        tmpl = textwrap.dedent("""
            '{project_name} ({version})' is being parsed as a legacy,
            non PEP 440,
            version. You may find odd behavior and sort order.
            In particular it will be sorted as less than 0.0. It
            is recommended to migrate to PEP 440 compatible
            versions.
            """).strip().replace('\n', ' ')

        warnings.warn(tmpl.format(**vars(self)), PEP440Warning)

    @property
    def version(self):
        try:
            return self._version
        except AttributeError:
            # not supplied at construction time; read it from metadata
            version = self._get_version()
            if version is None:
                path = self._get_metadata_path_for_display(self.PKG_INFO)
                msg = (
                    "Missing 'Version:' header and/or {} file at path: {}"
                ).format(self.PKG_INFO, path)
                raise ValueError(msg, self)

            return version

    @property
    def _dep_map(self):
        """
        A map of extra to its list of (direct) requirements
        for this distribution, including the null extra.
        """
        try:
            return self.__dep_map
        except AttributeError:
            self.__dep_map = self._filter_extras(self._build_dep_map())
        return self.__dep_map

    @staticmethod
    def _filter_extras(dm):
        """
        Given a mapping of extras to dependencies, strip off
        environment markers and filter out any dependencies
        not matching the markers.
        """
        for extra in list(filter(None, dm)):
            new_extra = extra
            reqs = dm.pop(extra)
            # extras may carry a marker after a colon: "extra:marker"
            new_extra, _, marker = extra.partition(':')
            fails_marker = marker and (
                invalid_marker(marker)
                or not evaluate_marker(marker)
            )
            if fails_marker:
                reqs = []
            new_extra = safe_extra(new_extra) or None

            dm.setdefault(new_extra, []).extend(reqs)
        return dm

    def _build_dep_map(self):
        # collect requirements from both egg-info metadata files
        dm = {}
        for name in 'requires.txt', 'depends.txt':
            for extra, reqs in split_sections(self._get_metadata(name)):
                dm.setdefault(extra, []).extend(parse_requirements(reqs))
        return dm

    def requires(self, extras=()):
        """List of Requirements needed for this distro if `extras` are used"""
        dm = self._dep_map
        deps = []
        # the None key holds the unconditional requirements
        deps.extend(dm.get(None, ()))
        for ext in extras:
            try:
                deps.extend(dm[safe_extra(ext)])
            except KeyError:
                raise UnknownExtra(
                    "%s has no such extra feature %r" % (self, ext)
                )
        return deps

    def _get_metadata_path_for_display(self, name):
        """
        Return the path to the given metadata file, if available.
        """
        try:
            # We need to access _get_metadata_path() on the provider object
            # directly rather than through this class's __getattr__()
            # since _get_metadata_path() is marked private.
            path = self._provider._get_metadata_path(name)

        # Handle exceptions e.g. in case the distribution's metadata
        # provider doesn't support _get_metadata_path().
        except Exception:
            return '[could not detect]'

        return path

    def _get_metadata(self, name):
        # Yield metadata lines for `name`, or nothing if absent.
        if self.has_metadata(name):
            for line in self.get_metadata_lines(name):
                yield line

    def _get_version(self):
        # Version as read from this distribution's metadata file, or None.
        lines = self._get_metadata(self.PKG_INFO)
        version = _version_from_file(lines)

        return version

    def activate(self, path=None, replace=False):
        """Ensure distribution is importable on `path` (default=sys.path)"""
        if path is None:
            path = sys.path
        self.insert_on(path, replace=replace)
        if path is sys.path:
            # also wire up any namespace packages this dist declares
            fixup_namespace_packages(self.location)
            for pkg in self._get_metadata('namespace_packages.txt'):
                if pkg in sys.modules:
                    declare_namespace(pkg)

    def egg_name(self):
        """Return what this distribution's standard .egg filename should be"""
        filename = "%s-%s-py%s" % (
            to_filename(self.project_name), to_filename(self.version),
            self.py_version or PY_MAJOR
        )

        if self.platform:
            filename += '-' + self.platform
        return filename

    def __repr__(self):
        if self.location:
            return "%s (%s)" % (self, self.location)
        else:
            return str(self)

    def __str__(self):
        try:
            # reading `version` may raise ValueError (missing metadata)
            version = getattr(self, 'version', None)
        except ValueError:
            version = None
        version = version or "[unknown version]"
        return "%s %s" % (self.project_name, version)

    def __getattr__(self, attr):
        """Delegate all unrecognized public attributes to .metadata provider"""
        if attr.startswith('_'):
            raise AttributeError(attr)
        return getattr(self._provider, attr)

    def __dir__(self):
        # merge our own attributes with the provider's public ones,
        # mirroring the delegation done in __getattr__
        return list(
            set(super(Distribution, self).__dir__())
            | set(
                attr for attr in self._provider.__dir__()
                if not attr.startswith('_')
            )
        )

    if not hasattr(object, '__dir__'):
        # python 2.7 not supported
        del __dir__

    @classmethod
    def from_filename(cls, filename, metadata=None, **kw):
        return cls.from_location(
            _normalize_cached(filename), os.path.basename(filename), metadata,
            **kw
        )

    def as_requirement(self):
        """Return a ``Requirement`` that matches this distribution exactly"""
        if isinstance(self.parsed_version, packaging.version.Version):
            spec = "%s==%s" % (self.project_name, self.parsed_version)
        else:
            # legacy (non-PEP 440) versions need the arbitrary-equality
            # operator
            spec = "%s===%s" % (self.project_name, self.parsed_version)

        return Requirement.parse(spec)

    def load_entry_point(self, group, name):
        """Return the `name` entry point of `group` or raise ImportError"""
        ep = self.get_entry_info(group, name)
        if ep is None:
            raise ImportError("Entry point %r not found" % ((group, name),))
        return ep.load()

    def get_entry_map(self, group=None):
        """Return the entry point map for `group`, or the full entry map"""
        try:
            ep_map = self._ep_map
        except AttributeError:
            # parsed lazily from entry_points.txt and cached
            ep_map = self._ep_map = EntryPoint.parse_map(
                self._get_metadata('entry_points.txt'), self
            )
        if group is not None:
            return ep_map.get(group, {})
        return ep_map

    def get_entry_info(self, group, name):
        """Return the EntryPoint object for `group`+`name`, or ``None``"""
        return self.get_entry_map(group).get(name)

    def insert_on(self, path, loc=None, replace=False):
        """Ensure self.location is on path

        If replace=False (default):
            - If location is already in path anywhere, do nothing.
            - Else:
              - If it's an egg and its parent directory is on path,
                insert just ahead of the parent.
              - Else: add to the end of path.
        If replace=True:
            - If location is already on path anywhere (not eggs)
              or higher priority than its parent (eggs)
              do nothing.
            - Else:
              - If it's an egg and its parent directory is on path,
                insert just ahead of the parent,
                removing any lower-priority entries.
              - Else: add it to the front of path.
        """

        loc = loc or self.location
        if not loc:
            return

        nloc = _normalize_cached(loc)
        bdir = os.path.dirname(nloc)
        # npath mirrors `path` with normalized entries; both lists are
        # kept in lockstep below
        npath = [(p and _normalize_cached(p) or p) for p in path]

        for p, item in enumerate(npath):
            if item == nloc:
                if replace:
                    break
                else:
                    # don't modify path (even removing duplicates) if
                    # found and not replace
                    return
            elif item == bdir and self.precedence == EGG_DIST:
                # if it's an .egg, give it precedence over its directory
                # UNLESS it's already been added to sys.path and replace=False
                if (not replace) and nloc in npath[p:]:
                    return
                if path is sys.path:
                    self.check_version_conflict()
                path.insert(p, loc)
                npath.insert(p, nloc)
                break
        else:
            if path is sys.path:
                self.check_version_conflict()
            if replace:
                path.insert(0, loc)
            else:
                path.append(loc)
            return

        # p is the spot where we found or inserted loc; now remove duplicates
        while True:
            try:
                np = npath.index(nloc, p + 1)
            except ValueError:
                break
            else:
                del npath[np], path[np]
                # ha!
                p = np

        return

    def check_version_conflict(self):
        # warn when a top-level module of this dist was already imported
        # from somewhere other than this dist's location
        if self.key == 'setuptools':
            # ignore the inevitable setuptools self-conflicts :(
            return

        nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
        loc = normalize_path(self.location)
        for modname in self._get_metadata('top_level.txt'):
            if (modname not in sys.modules or modname in nsp
                    or modname in _namespace_packages):
                continue
            if modname in ('pkg_resources', 'setuptools', 'site'):
                continue
            fn = getattr(sys.modules[modname], '__file__', None)
            if fn and (normalize_path(fn).startswith(loc) or
                       fn.startswith(self.location)):
                continue
            issue_warning(
                "Module %s was already imported from %s, but %s is being added"
                " to sys.path" % (modname, fn, self.location),
            )

    def has_version(self):
        # True when the version is determinable; warns instead of raising
        try:
            self.version
        except ValueError:
            issue_warning("Unbuilt egg for " + repr(self))
            return False
        return True

    def clone(self, **kw):
        """Copy this distribution, substituting in any changed keyword args"""
        names = 'project_name version py_version platform location precedence'
        for attr in names.split():
            kw.setdefault(attr, getattr(self, attr, None))
        kw.setdefault('metadata', self._provider)
        return self.__class__(**kw)

    @property
    def extras(self):
        return [dep for dep in self._dep_map if dep]


class EggInfoDistribution(Distribution):
    def _reload_version(self):
        """
        Packages installed by distutils (e.g. numpy or scipy),
        which uses an old safe_version, and so
        their version numbers can get mangled when
        converted to filenames (e.g., 1.11.0.dev0+2329eae to
        1.11.0.dev0_2329eae). These distributions will not be
        parsed properly
        downstream by Distribution and safe_version, so
        take an extra step and try to get the version number from
        the metadata file itself instead of the filename.
        """
        md_version = self._get_version()
        if md_version:
            self._version = md_version
        return self


class DistInfoDistribution(Distribution):
    """
    Wrap an actual or potential sys.path entry
    w/metadata, .dist-info style.
    """
    PKG_INFO = 'METADATA'
    EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])")

    @property
    def _parsed_pkg_info(self):
        """Parse and cache metadata"""
        try:
            return self._pkg_info
        except AttributeError:
            metadata = self.get_metadata(self.PKG_INFO)
            self._pkg_info = email.parser.Parser().parsestr(metadata)
            return self._pkg_info

    @property
    def _dep_map(self):
        # name mangling makes this cache distinct from the base class's
        # _Distribution__dep_map
        try:
            return self.__dep_map
        except AttributeError:
            self.__dep_map = self._compute_dependencies()
            return self.__dep_map

    def _compute_dependencies(self):
        """Recompute this distribution's dependencies."""
        dm = self.__dep_map = {None: []}

        reqs = []
        # Including any condition expressions
        for req in self._parsed_pkg_info.get_all('Requires-Dist') or []:
            reqs.extend(parse_requirements(req))

        def reqs_for_extra(extra):
            for req in reqs:
                if not req.marker or req.marker.evaluate({'extra': extra}):
                    yield req

        common = frozenset(reqs_for_extra(None))
        dm[None].extend(common)

        for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:
            s_extra = safe_extra(extra.strip())
            # per-extra requirements, minus the unconditional ones
            dm[s_extra] = list(frozenset(reqs_for_extra(extra)) - common)

        return dm


# dispatch table used by Distribution.from_location, keyed by extension
_distributionImpl = {
    '.egg': Distribution,
    '.egg-info': EggInfoDistribution,
    '.dist-info': DistInfoDistribution,
}


def issue_warning(*args, **kw):
    """
    Issue a warning attributed to the first stack frame outside
    this module.
    """
    level = 1
    g = globals()
    try:
        # find the first stack frame that is *not* code in
        # the pkg_resources module, to use for the warning
        while sys._getframe(level).f_globals is g:
            level += 1
    except ValueError:
        pass
    warnings.warn(stacklevel=level + 1, *args, **kw)


class RequirementParseError(ValueError):
    # raised in place of packaging's InvalidRequirement (see Requirement)
    def __str__(self):
        return ' '.join(self.args)


def parse_requirements(strs):
    """Yield ``Requirement`` objects for each specification in `strs`

    `strs` must be a string, or a (possibly-nested) iterable thereof.
    """
    # create a steppable iterator, so we can handle \-continuations
    lines = iter(yield_lines(strs))

    for line in lines:
        # Drop comments -- a hash without a space may be in a URL.
        if ' #' in line:
            line = line[:line.find(' #')]
        # If there is a line continuation, drop it, and append the next line.
        if line.endswith('\\'):
            # NOTE(review): [:-2] removes the backslash AND the character
            # before it — this assumes the continuation is written as
            # " \"; a backslash with no preceding space would lose a
            # character. TODO confirm upstream intent before changing.
            line = line[:-2].strip()
            try:
                line += next(lines)
            except StopIteration:
                return
        yield Requirement(line)


class Requirement(packaging.requirements.Requirement):
    def __init__(self, requirement_string):
        """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
        try:
            super(Requirement, self).__init__(requirement_string)
        except packaging.requirements.InvalidRequirement as e:
            raise RequirementParseError(str(e))
        self.unsafe_name = self.name
        project_name = safe_name(self.name)
        self.project_name, self.key = project_name, project_name.lower()
        # legacy (operator, version) pairs derived from the specifier set
        self.specs = [
            (spec.operator, spec.version) for spec in self.specifier]
        self.extras = tuple(map(safe_extra, self.extras))
        self.hashCmp = (
            self.key,
            self.specifier,
            frozenset(self.extras),
            str(self.marker) if self.marker else None,
        )
        # precomputed; served by __hash__ (name-mangled to _Requirement__hash)
        self.__hash = hash(self.hashCmp)

    def __eq__(self, other):
        return (
            isinstance(other, Requirement) and
            self.hashCmp == other.hashCmp
        )

    def __ne__(self, other):
        return not self == other

    def __contains__(self, item):
        # accepts either a Distribution or a version string/object
        if isinstance(item, Distribution):
            if item.key != self.key:
                return False

            item = item.version

        # Allow prereleases always in order to match the previous behavior of
        # this method. In the future this should be smarter and follow PEP 440
        # more accurately.
        return self.specifier.contains(item, prereleases=True)

    def __hash__(self):
        return self.__hash

    def __repr__(self):
        return "Requirement.parse(%r)" % str(self)

    @staticmethod
    def parse(s):
        # tuple-unpack enforces exactly one requirement in `s`
        req, = parse_requirements(s)
        return req


def _always_object(classes):
    """
    Ensure object appears in the mro even
    for old-style classes.
    """
    if object not in classes:
        return classes + (object,)
    return classes


def _find_adapter(registry, ob):
    """Return an adapter factory for `ob` from `registry`"""
    # walk ob's MRO, most-specific first; anything registered for
    # `object` acts as the catch-all
    types = _always_object(inspect.getmro(getattr(ob, '__class__', type(ob))))
    for t in types:
        if t in registry:
            return registry[t]


def ensure_directory(path):
    """Ensure that the parent directory of `path` exists"""
    dirname = os.path.dirname(path)
    py31compat.makedirs(dirname, exist_ok=True)


def _bypass_ensure_directory(path):
    """Sandbox-bypassing version of ensure_directory()"""
    if not WRITE_SUPPORT:
        raise IOError('"os.mkdir" not supported on this platform.')
    dirname, filename = split(path)
    if dirname and filename and not isdir(dirname):
        _bypass_ensure_directory(dirname)
        try:
            mkdir(dirname, 0o755)
        except FileExistsError:
            # another writer created it first; that's fine
            pass


def split_sections(s):
    """Split a string or iterable thereof into (section, content) pairs

    Each ``section`` is a stripped version of the section header ("[section]")
    and each ``content`` is a list of stripped lines excluding blank lines and
    comment-only lines. If there are any such lines before the first section
    header, they're returned in a first ``section`` of ``None``.
    """
    section = None
    content = []
    for line in yield_lines(s):
        if line.startswith("["):
            if line.endswith("]"):
                if section or content:
                    yield section, content
                section = line[1:-1].strip()
                content = []
            else:
                raise ValueError("Invalid section heading", line)
        else:
            content.append(line)

    # wrap up last segment
    yield section, content


def _mkstemp(*args, **kw):
    """tempfile.mkstemp, bypassing the sandboxed os.open."""
    old_open = os.open
    try:
        # temporarily bypass sandboxing
        os.open = os_open
        return tempfile.mkstemp(*args, **kw)
    finally:
        # and then put it back
        os.open = old_open


# Silence the PEP440Warning by default, so that end users don't get hit by it
# randomly just because they use pkg_resources. We want to append the rule
# because we want earlier uses of filterwarnings to take precedence over this
# one.
warnings.filterwarnings("ignore", category=PEP440Warning, append=True)


# from jaraco.functools 1.3
def _call_aside(f, *args, **kwargs):
    # decorator: invoke f immediately, then return it unchanged
    f(*args, **kwargs)
    return f


@_call_aside
def _initialize(g=globals()):
    "Set up global resource manager (deliberately not state-saved)"
    manager = ResourceManager()
    g['_manager'] = manager
    # re-export the manager's public methods as module-level functions
    g.update(
        (name, getattr(manager, name))
        for name in dir(manager)
        if not name.startswith('_')
    )


@_call_aside
def _initialize_master_working_set():
    """
    Prepare the master working set and make the ``require()``
    API available.

    This function has explicit effects on the global state
    of pkg_resources. It is intended to be invoked once at
    the initialization of this module.

    Invocation by other packages is unsupported and done
    at their own risk.
    """
    working_set = WorkingSet._build_master()
    _declare_state('object', working_set=working_set)

    require = working_set.require
    iter_entry_points = working_set.iter_entry_points
    add_activation_listener = working_set.subscribe
    run_script = working_set.run_script
    # backward compatibility
    run_main = run_script
    # Activate all distributions already on sys.path with replace=False and
    # ensure that all distributions added to the working set in the future
    # (e.g. by calling ``require()``) will get activated as well,
    # with higher priority (replace=True).
    tuple(
        dist.activate(replace=False)
        for dist in working_set
    )
    add_activation_listener(
        lambda dist: dist.activate(replace=True),
        existing=False,
    )
    working_set.entries = []
    # match order
    list(map(working_set.add_entry, sys.path))
    # publish all of the names bound above (require, run_script, ...)
    # as module-level globals
    globals().update(locals())

class PkgResourcesDeprecationWarning(Warning):
    """
    Base class for warning about deprecations in ``pkg_resources``

    This class is not derived from ``DeprecationWarning``, and as such is
    visible by default.
    """