# The following comment should be removed at some point in the future.
# mypy: strict-optional=False

from __future__ import absolute_import

import logging
import os
import shutil
import sys
import uuid
import zipfile

from pip._vendor import pkg_resources, six
from pip._vendor.packaging.requirements import Requirement
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.packaging.version import Version
from pip._vendor.packaging.version import parse as parse_version
from pip._vendor.pep517.wrappers import Pep517HookCaller

from pip._internal.build_env import NoOpBuildEnvironment
from pip._internal.exceptions import InstallationError
from pip._internal.locations import get_scheme
from pip._internal.models.link import Link
from pip._internal.operations.build.metadata import generate_metadata
from pip._internal.operations.build.metadata_legacy import (
    generate_metadata as generate_metadata_legacy,
)
from pip._internal.operations.install.editable_legacy import (
    install_editable as install_editable_legacy,
)
from pip._internal.operations.install.legacy import LegacyInstallFailure
from pip._internal.operations.install.legacy import install as install_legacy
from pip._internal.operations.install.wheel import install_wheel
from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path
from pip._internal.req.req_uninstall import UninstallPathSet
from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.direct_url_helpers import direct_url_from_link
from pip._internal.utils.hashes import Hashes
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import (
    ask_path_exists,
    backup_dir,
    display_path,
    dist_in_site_packages,
    dist_in_usersite,
    get_distribution,
    get_installed_version,
    hide_url,
    redact_auth_from_url,
)
from pip._internal.utils.packaging import get_metadata
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.virtualenv import running_under_virtualenv
from pip._internal.vcs import vcs

if MYPY_CHECK_RUNNING:
    from typing import Any, Dict, Iterable, List, Optional, Sequence, Union

    from pip._vendor.packaging.markers import Marker
    from pip._vendor.packaging.specifiers import SpecifierSet
    from pip._vendor.pkg_resources import Distribution

    from pip._internal.build_env import BuildEnvironment


logger = logging.getLogger(__name__)


def _get_dist(metadata_directory):
    # type: (str) -> Distribution
    """Return a pkg_resources.Distribution for the provided
    metadata directory.
    """
    dist_dir = metadata_directory.rstrip(os.sep)

    # Build a PathMetadata object, from path to metadata. :wink:
    base_dir, dist_dir_name = os.path.split(dist_dir)
    metadata = pkg_resources.PathMetadata(base_dir, dist_dir)

    # Determine the correct Distribution object type.
    if dist_dir.endswith(".egg-info"):
        dist_cls = pkg_resources.Distribution
        dist_name = os.path.splitext(dist_dir_name)[0]
    else:
        assert dist_dir.endswith(".dist-info")
        dist_cls = pkg_resources.DistInfoDistribution
        # A .dist-info directory is named "<name>-<version>.dist-info";
        # keep only the project-name part.
        dist_name = os.path.splitext(dist_dir_name)[0].split("-")[0]

    return dist_cls(
        base_dir,
        project_name=dist_name,
        metadata=metadata,
    )


class InstallRequirement(object):
    """
    Represents something that may be installed later on, may have information
    about where to fetch the relevant requirement and also contains logic for
    installing the said requirement.
    """

    def __init__(
        self,
        req,  # type: Optional[Requirement]
        comes_from,  # type: Optional[Union[str, InstallRequirement]]
        editable=False,  # type: bool
        link=None,  # type: Optional[Link]
        markers=None,  # type: Optional[Marker]
        use_pep517=None,  # type: Optional[bool]
        isolated=False,  # type: bool
        install_options=None,  # type: Optional[List[str]]
        global_options=None,  # type: Optional[List[str]]
        hash_options=None,  # type: Optional[Dict[str, List[str]]]
        constraint=False,  # type: bool
        extras=(),  # type: Iterable[str]
        user_supplied=False,  # type: bool
    ):
        # type: (...) -> None
        assert req is None or isinstance(req, Requirement), req
        self.req = req
        self.comes_from = comes_from
        self.constraint = constraint
        self.editable = editable
        # Set by callers to a deprecation-issue number (e.g. 8368) to
        # trigger the legacy-install deprecation warning after install().
        self.legacy_install_reason = None  # type: Optional[int]

        # source_dir is the local directory where the linked requirement is
        # located, or unpacked. In case unpacking is needed, creating and
        # populating source_dir is done by the RequirementPreparer. Note this
        # is not necessarily the directory where pyproject.toml or setup.py is
        # located - that one is obtained via unpacked_source_directory.
        self.source_dir = None  # type: Optional[str]
        if self.editable:
            assert link
            if link.is_file:
                self.source_dir = os.path.normpath(
                    os.path.abspath(link.file_path)
                )

        if link is None and req and req.url:
            # PEP 508 URL requirement
            link = Link(req.url)
        self.link = self.original_link = link
        self.original_link_is_in_wheel_cache = False

        # Path to any downloaded or already-existing package.
        self.local_file_path = None  # type: Optional[str]
        if self.link and self.link.is_file:
            self.local_file_path = self.link.file_path

        if extras:
            self.extras = extras
        elif req:
            # Normalize extras coming from the requirement itself.
            self.extras = {
                pkg_resources.safe_extra(extra) for extra in req.extras
            }
        else:
            self.extras = set()
        if markers is None and req:
            markers = req.marker
        self.markers = markers

        # This holds the pkg_resources.Distribution object if this requirement
        # is already available:
        self.satisfied_by = None  # type: Optional[Distribution]
        # Whether the installation process should try to uninstall an existing
        # distribution before installing this requirement.
        self.should_reinstall = False
        # Temporary build location
        self._temp_build_dir = None  # type: Optional[TempDirectory]
        # Set to True after successful installation
        self.install_succeeded = None  # type: Optional[bool]
        # Supplied options
        self.install_options = install_options if install_options else []
        self.global_options = global_options if global_options else []
        self.hash_options = hash_options if hash_options else {}
        # Set to True after successful preparation of this requirement
        self.prepared = False
        # User supplied requirement are explicitly requested for installation
        # by the user via CLI arguments or requirements files, as opposed to,
        # e.g. dependencies, extras or constraints.
        self.user_supplied = user_supplied

        self.isolated = isolated
        self.build_env = NoOpBuildEnvironment()  # type: BuildEnvironment

        # For PEP 517, the directory where we request the project metadata
        # gets stored. We need this to pass to build_wheel, so the backend
        # can ensure that the wheel matches the metadata (see the PEP for
        # details).
        self.metadata_directory = None  # type: Optional[str]

        # The static build requirements (from pyproject.toml)
        self.pyproject_requires = None  # type: Optional[List[str]]

        # Build requirements that we will check are available
        self.requirements_to_check = []  # type: List[str]

        # The PEP 517 backend we should use to build the project
        self.pep517_backend = None  # type: Optional[Pep517HookCaller]

        # Are we using PEP 517 for this requirement?
        # After pyproject.toml has been loaded, the only valid values are True
        # and False. Before loading, None is valid (meaning "use the default").
        # Setting an explicit value before loading pyproject.toml is supported,
        # but after loading this flag should be treated as read only.
        self.use_pep517 = use_pep517

        # This requirement needs more preparation before it can be built
        self.needs_more_preparation = False

    def __str__(self):
        # type: () -> str
        # Human-readable summary; credentials in URLs are redacted.
        if self.req:
            s = str(self.req)
            if self.link:
                s += ' from {}'.format(redact_auth_from_url(self.link.url))
        elif self.link:
            s = redact_auth_from_url(self.link.url)
        else:
            s = '<InstallRequirement>'
        if self.satisfied_by is not None:
            s += ' in {}'.format(display_path(self.satisfied_by.location))
        if self.comes_from:
            if isinstance(self.comes_from, six.string_types):
                comes_from = self.comes_from  # type: Optional[str]
            else:
                comes_from = self.comes_from.from_path()
            if comes_from:
                s += ' (from {})'.format(comes_from)
        return s

    def __repr__(self):
        # type: () -> str
        return '<{} object: {} editable={!r}>'.format(
            self.__class__.__name__, str(self), self.editable)

    def format_debug(self):
        # type: () -> str
        """An un-tested helper for getting state, for debugging.
        """
        attributes = vars(self)
        names = sorted(attributes)

        # NOTE(review): `names` is already sorted, so the inner sorted() is
        # redundant (harmless, but could be dropped).
        state = (
            "{}={!r}".format(attr, attributes[attr]) for attr in sorted(names)
        )
        return '<{name} object: {{{state}}}>'.format(
            name=self.__class__.__name__,
            state=", ".join(state),
        )

    # Things that are valid for all kinds of requirements?
    @property
    def name(self):
        # type: () -> Optional[str]
        """The project name (via pkg_resources.safe_name), or None for an
        unnamed requirement.
        """
        if self.req is None:
            return None
        return six.ensure_str(pkg_resources.safe_name(self.req.name))

    @property
    def specifier(self):
        # type: () -> SpecifierSet
        """The version specifier of the underlying requirement."""
        return self.req.specifier

    @property
    def is_pinned(self):
        # type: () -> bool
        """Return whether I am pinned to an exact version.

        For example, some-package==1.2 is pinned; some-package>1.2 is not.
        """
        specifiers = self.specifier
        return (len(specifiers) == 1 and
                next(iter(specifiers)).operator in {'==', '==='})

    @property
    def installed_version(self):
        # type: () -> Optional[str]
        """The currently-installed version of this project, if any."""
        return get_installed_version(self.name)

    def match_markers(self, extras_requested=None):
        # type: (Optional[Iterable[str]]) -> bool
        """Return whether the environment markers (if any) evaluate to true
        for at least one of the requested extras.
        """
        if not extras_requested:
            # Provide an extra to safely evaluate the markers
            # without matching any extra
            extras_requested = ('',)
        if self.markers is not None:
            return any(
                self.markers.evaluate({'extra': extra})
                for extra in extras_requested)
        else:
            return True

    @property
    def has_hash_options(self):
        # type: () -> bool
        """Return whether any known-good hashes are specified as options.

        These activate --require-hashes mode; hashes specified as part of a
        URL do not.

        """
        return bool(self.hash_options)

    def hashes(self, trust_internet=True):
        # type: (bool) -> Hashes
        """Return a hash-comparer that considers my option- and URL-based
        hashes to be known-good.

        Hashes in URLs--ones embedded in the requirements file, not ones
        downloaded from an index server--are almost peers with ones from
        flags. They satisfy --require-hashes (whether it was implicitly or
        explicitly activated) but do not activate it. md5 and sha224 are not
        allowed in flags, which should nudge people toward good algos. We
        always OR all hashes together, even ones from URLs.

        :param trust_internet: Whether to trust URL-based (#md5=...) hashes
            downloaded from the internet, as by populate_link()

        """
        good_hashes = self.hash_options.copy()
        link = self.link if trust_internet else self.original_link
        if link and link.hash:
            good_hashes.setdefault(link.hash_name, []).append(link.hash)
        return Hashes(good_hashes)

    def from_path(self):
        # type: () -> Optional[str]
        """Format a nice indicator to show where this "comes from"
        """
        if self.req is None:
            return None
        s = str(self.req)
        if self.comes_from:
            if isinstance(self.comes_from, six.string_types):
                comes_from = self.comes_from
            else:
                comes_from = self.comes_from.from_path()
            if comes_from:
                s += '->' + comes_from
        return s

    def ensure_build_location(self, build_dir, autodelete, parallel_builds):
        # type: (str, bool, bool) -> str
        """Return (creating it if needed) the directory this requirement
        should be built in, registering it for managed deletion.
        """
        assert build_dir is not None
        if self._temp_build_dir is not None:
            assert self._temp_build_dir.path
            return self._temp_build_dir.path
        if self.req is None:
            # Some systems have /tmp as a symlink which confuses custom
            # builds (such as numpy). Thus, we ensure that the real path
            # is returned.
            self._temp_build_dir = TempDirectory(
                kind=tempdir_kinds.REQ_BUILD, globally_managed=True
            )

            return self._temp_build_dir.path

        # This is the only remaining place where we manually determine the path
        # for the temporary directory. It is only needed for editables where
        # it is the value of the --src option.

        # When parallel builds are enabled, add a UUID to the build directory
        # name so multiple builds do not interfere with each other.
        dir_name = canonicalize_name(self.name)
        if parallel_builds:
            dir_name = "{}_{}".format(dir_name, uuid.uuid4().hex)

        # FIXME: Is there a better place to create the build_dir? (hg and bzr
        # need this)
        if not os.path.exists(build_dir):
            logger.debug('Creating directory %s', build_dir)
            os.makedirs(build_dir)
        actual_build_dir = os.path.join(build_dir, dir_name)
        # `None` indicates that we respect the globally-configured deletion
        # settings, which is what we actually want when auto-deleting.
        delete_arg = None if autodelete else False
        return TempDirectory(
            path=actual_build_dir,
            delete=delete_arg,
            kind=tempdir_kinds.REQ_BUILD,
            globally_managed=True,
        ).path

    def _set_requirement(self):
        # type: () -> None
        """Set requirement after generating metadata.
        """
        assert self.req is None
        assert self.metadata is not None
        assert self.source_dir is not None

        # Construct a Requirement object from the generated metadata.
        # Use "===" (arbitrary equality) for versions that do not parse
        # as PEP 440 versions.
        if isinstance(parse_version(self.metadata["Version"]), Version):
            op = "=="
        else:
            op = "==="

        self.req = Requirement(
            "".join([
                self.metadata["Name"],
                op,
                self.metadata["Version"],
            ])
        )

    def warn_on_mismatching_name(self):
        # type: () -> None
        """Warn (and adopt the metadata name) if the generated metadata's
        project name differs from the requested name.
        """
        metadata_name = canonicalize_name(self.metadata["Name"])
        if canonicalize_name(self.req.name) == metadata_name:
            # Everything is fine.
            return

        # If we're here, there's a mismatch. Log a warning about it.
        logger.warning(
            'Generating metadata for package %s '
            'produced metadata for project name %s. Fix your '
            '#egg=%s fragments.',
            self.name, metadata_name, self.name
        )
        self.req = Requirement(metadata_name)

    def check_if_exists(self, use_user_site):
        # type: (bool) -> None
        """Find an installed distribution that satisfies or conflicts
        with this requirement, and set self.satisfied_by or
        self.should_reinstall appropriately.
        """
        if self.req is None:
            return
        existing_dist = get_distribution(self.req.name)
        if not existing_dist:
            return

        existing_version = existing_dist.parsed_version
        if not self.req.specifier.contains(existing_version, prereleases=True):
            self.satisfied_by = None
            if use_user_site:
                if dist_in_usersite(existing_dist):
                    self.should_reinstall = True
                elif (running_under_virtualenv() and
                        dist_in_site_packages(existing_dist)):
                    raise InstallationError(
                        "Will not install to the user site because it will "
                        "lack sys.path precedence to {} in {}".format(
                            existing_dist.project_name, existing_dist.location)
                    )
            else:
                self.should_reinstall = True
        else:
            if self.editable:
                self.should_reinstall = True
                # when installing editables, nothing pre-existing should ever
                # satisfy
                self.satisfied_by = None
            else:
                self.satisfied_by = existing_dist

    # Things valid for wheels
    @property
    def is_wheel(self):
        # type: () -> bool
        """Whether this requirement points at a wheel file."""
        if not self.link:
            return False
        return self.link.is_wheel

    # Things valid for sdists
    @property
    def unpacked_source_directory(self):
        # type: () -> str
        """The project directory: source_dir joined with the link's
        subdirectory fragment, if any.
        """
        return os.path.join(
            self.source_dir,
            self.link and self.link.subdirectory_fragment or '')

    @property
    def setup_py_path(self):
        # type: () -> str
        """Path to setup.py inside the unpacked source directory."""
        assert self.source_dir, "No source dir for {}".format(self)
        setup_py = os.path.join(self.unpacked_source_directory, 'setup.py')

        # Python2 __file__ should not be unicode
        if six.PY2 and isinstance(setup_py, six.text_type):
            setup_py = setup_py.encode(sys.getfilesystemencoding())

        return setup_py

    @property
    def pyproject_toml_path(self):
        # type: () -> str
        """Path to pyproject.toml inside the unpacked source directory."""
        assert self.source_dir, "No source dir for {}".format(self)
        return make_pyproject_path(self.unpacked_source_directory)

    def load_pyproject_toml(self):
        # type: () -> None
        """Load the pyproject.toml file.

        After calling this routine, all of the attributes related to PEP 517
        processing for this requirement have been set. In particular, the
        use_pep517 attribute can be used to determine whether we should
        follow the PEP 517 or legacy (setup.py) code path.
        """
        pyproject_toml_data = load_pyproject_toml(
            self.use_pep517,
            self.pyproject_toml_path,
            self.setup_py_path,
            str(self)
        )

        if pyproject_toml_data is None:
            self.use_pep517 = False
            return

        self.use_pep517 = True
        requires, backend, check, backend_path = pyproject_toml_data
        self.requirements_to_check = check
        self.pyproject_requires = requires
        self.pep517_backend = Pep517HookCaller(
            self.unpacked_source_directory, backend, backend_path=backend_path,
        )

    def _generate_metadata(self):
        # type: () -> str
        """Invokes metadata generator functions, with the required arguments.
        """
        if not self.use_pep517:
            assert self.unpacked_source_directory

            return generate_metadata_legacy(
                build_env=self.build_env,
                setup_py_path=self.setup_py_path,
                source_dir=self.unpacked_source_directory,
                isolated=self.isolated,
                details=self.name or "from {}".format(self.link)
            )

        assert self.pep517_backend is not None

        return generate_metadata(
            build_env=self.build_env,
            backend=self.pep517_backend,
        )

    def prepare_metadata(self):
        # type: () -> None
        """Ensure that project metadata is available.

        Under PEP 517, call the backend hook to prepare the metadata.
        Under legacy processing, call setup.py egg-info.
        """
        assert self.source_dir

        with indent_log():
            self.metadata_directory = self._generate_metadata()

        # Act on the newly generated metadata, based on the name and version.
        if not self.name:
            self._set_requirement()
        else:
            self.warn_on_mismatching_name()

        self.assert_source_matches_version()

    @property
    def metadata(self):
        # type: () -> Any
        """Lazily-loaded metadata for the generated distribution."""
        if not hasattr(self, '_metadata'):
            self._metadata = get_metadata(self.get_dist())

        return self._metadata

    def get_dist(self):
        # type: () -> Distribution
        """Return a Distribution built from the prepared metadata directory."""
        return _get_dist(self.metadata_directory)

    def assert_source_matches_version(self):
        # type: () -> None
        """Log a warning if the generated metadata's version does not satisfy
        the requested specifier (this does not raise, despite the name).
        """
        assert self.source_dir
        version = self.metadata['version']
        if self.req.specifier and version not in self.req.specifier:
            logger.warning(
                'Requested %s, but installing version %s',
                self,
                version,
            )
        else:
            logger.debug(
                'Source in %s has version %s, which satisfies requirement %s',
                display_path(self.source_dir),
                version,
                self,
            )

    # For both source distributions and editables
    def ensure_has_source_dir(
        self,
        parent_dir,
        autodelete=False,
        parallel_builds=False,
    ):
        # type: (str, bool, bool) -> None
        """Ensure that a source_dir is set.

        This will create a temporary build dir if the name of the requirement
        isn't known yet.

        :param parent_dir: The ideal pip parent_dir for the source_dir.
            Generally src_dir for editables and build_dir for sdists.
        :return: self.source_dir
        """
        if self.source_dir is None:
            self.source_dir = self.ensure_build_location(
                parent_dir,
                autodelete=autodelete,
                parallel_builds=parallel_builds,
            )

    # For editable installations
    def update_editable(self, obtain=True):
        # type: (bool) -> None
        """Obtain (or export) the editable checkout via its VCS backend.

        No-op for static file: URLs; asserts for unknown VCS types.
        """
        if not self.link:
            logger.debug(
                "Cannot update repository at %s; repository location is "
                "unknown",
                self.source_dir,
            )
            return
        assert self.editable
        assert self.source_dir
        if self.link.scheme == 'file':
            # Static paths don't get updated
            return
        assert '+' in self.link.url, \
            "bad url: {self.link.url!r}".format(**locals())
        vc_type, url = self.link.url.split('+', 1)
        vcs_backend = vcs.get_backend(vc_type)
        if vcs_backend:
            if not self.link.is_vcs:
                reason = (
                    "This form of VCS requirement is being deprecated: {}."
                ).format(
                    self.link.url
                )
                replacement = None
                if self.link.url.startswith("git+git@"):
                    replacement = (
                        "git+https://git@example.com/..., "
                        "git+ssh://git@example.com/..., "
                        "or the insecure git+git://git@example.com/..."
                    )
                deprecated(reason, replacement, gone_in="21.0", issue=7554)
            hidden_url = hide_url(self.link.url)
            if obtain:
                vcs_backend.obtain(self.source_dir, url=hidden_url)
            else:
                vcs_backend.export(self.source_dir, url=hidden_url)
        else:
            assert 0, (
                'Unexpected version control type (in {}): {}'.format(
                    self.link, vc_type))

    # Top-level Actions
    def uninstall(self, auto_confirm=False, verbose=False):
        # type: (bool, bool) -> Optional[UninstallPathSet]
        """
        Uninstall the distribution currently satisfying this requirement.

        Prompts before removing or modifying files unless
        ``auto_confirm`` is True.

        Refuses to delete or modify files outside of ``sys.prefix`` -
        thus uninstallation within a virtual environment can only
        modify that virtual environment, even if the virtualenv is
        linked to global site-packages.

        """
        assert self.req
        dist = get_distribution(self.req.name)
        if not dist:
            logger.warning("Skipping %s as it is not installed.", self.name)
            return None
        logger.info('Found existing installation: %s', dist)

        uninstalled_pathset = UninstallPathSet.from_dist(dist)
        uninstalled_pathset.remove(auto_confirm, verbose)
        return uninstalled_pathset

    def _get_archive_name(self, path, parentdir, rootdir):
        # type: (str, str, str) -> str
        """Build the in-archive name for ``path``: the requirement name
        followed by the path relative to ``rootdir``, using '/' separators.
        """

        def _clean_zip_name(name, prefix):
            # type: (str, str) -> str
            assert name.startswith(prefix + os.path.sep), (
                "name {name!r} doesn't start with prefix {prefix!r}"
                .format(**locals())
            )
            name = name[len(prefix) + 1:]
            name = name.replace(os.path.sep, '/')
            return name

        path = os.path.join(parentdir, path)
        name = _clean_zip_name(path, rootdir)
        return self.name + '/' + name

    def archive(self, build_dir):
        # type: (Optional[str]) -> None
        """Saves archive to provided build_dir.

        Used for saving downloaded VCS requirements as part of `pip download`.
        """
        assert self.source_dir
        if build_dir is None:
            return

        create_archive = True
        archive_name = '{}-{}.zip'.format(self.name, self.metadata["version"])
        archive_path = os.path.join(build_dir, archive_name)

        if os.path.exists(archive_path):
            # Ask the user what to do with the pre-existing archive.
            response = ask_path_exists(
                'The file {} exists. (i)gnore, (w)ipe, '
                '(b)ackup, (a)bort '.format(
                    display_path(archive_path)),
                ('i', 'w', 'b', 'a'))
            if response == 'i':
                create_archive = False
            elif response == 'w':
                logger.warning('Deleting %s', display_path(archive_path))
                os.remove(archive_path)
            elif response == 'b':
                dest_file = backup_dir(archive_path)
                logger.warning(
                    'Backing up %s to %s',
                    display_path(archive_path),
                    display_path(dest_file),
                )
                shutil.move(archive_path, dest_file)
            elif response == 'a':
                sys.exit(-1)

        if not create_archive:
            return

        zip_output = zipfile.ZipFile(
            archive_path, 'w', zipfile.ZIP_DEFLATED, allowZip64=True,
        )
        with zip_output:
            dir = os.path.normcase(
                os.path.abspath(self.unpacked_source_directory)
            )
            for dirpath, dirnames, filenames in os.walk(dir):
                for dirname in dirnames:
                    dir_arcname = self._get_archive_name(
                        dirname, parentdir=dirpath, rootdir=dir,
                    )
                    zipdir = zipfile.ZipInfo(dir_arcname + '/')
                    zipdir.external_attr = 0x1ED << 16  # 0o755
                    zip_output.writestr(zipdir, '')
                for filename in filenames:
                    file_arcname = self._get_archive_name(
                        filename, parentdir=dirpath, rootdir=dir,
                    )
                    filename = os.path.join(dirpath, filename)
                    zip_output.write(filename, file_arcname)

        logger.info('Saved %s', display_path(archive_path))

    def install(
        self,
        install_options,  # type: List[str]
        global_options=None,  # type: Optional[Sequence[str]]
        root=None,  # type: Optional[str]
        home=None,  # type: Optional[str]
        prefix=None,  # type: Optional[str]
        warn_script_location=True,  # type: bool
        use_user_site=False,  # type: bool
        pycompile=True  # type: bool
    ):
        # type: (...) -> None
        """Install this requirement, dispatching to the editable, wheel, or
        legacy ``setup.py install`` code path as appropriate. Sets
        ``self.install_succeeded``.
        """
        scheme = get_scheme(
            self.name,
            user=use_user_site,
            home=home,
            root=root,
            isolated=self.isolated,
            prefix=prefix,
        )

        global_options = global_options if global_options is not None else []
        if self.editable:
            install_editable_legacy(
                install_options,
                global_options,
                prefix=prefix,
                home=home,
                use_user_site=use_user_site,
                name=self.name,
                setup_py_path=self.setup_py_path,
                isolated=self.isolated,
                build_env=self.build_env,
                unpacked_source_directory=self.unpacked_source_directory,
            )
            self.install_succeeded = True
            return

        if self.is_wheel:
            assert self.local_file_path
            direct_url = None
            if self.original_link:
                direct_url = direct_url_from_link(
                    self.original_link,
                    self.source_dir,
                    self.original_link_is_in_wheel_cache,
                )
            install_wheel(
                self.name,
                self.local_file_path,
                scheme=scheme,
                req_description=str(self.req),
                pycompile=pycompile,
                warn_script_location=warn_script_location,
                direct_url=direct_url,
                requested=self.user_supplied,
            )
            self.install_succeeded = True
            return

        # TODO: Why don't we do this for editable installs?

        # Extend the list of global and install options passed on to
        # the setup.py call with the ones from the requirements file.
        # Options specified in requirements file override those
        # specified on the command line, since the last option given
        # to setup.py is the one that is used.
        global_options = list(global_options) + self.global_options
        install_options = list(install_options) + self.install_options

        try:
            success = install_legacy(
                install_options=install_options,
                global_options=global_options,
                root=root,
                home=home,
                prefix=prefix,
                use_user_site=use_user_site,
                pycompile=pycompile,
                scheme=scheme,
                setup_py_path=self.setup_py_path,
                isolated=self.isolated,
                req_name=self.name,
                build_env=self.build_env,
                unpacked_source_directory=self.unpacked_source_directory,
                req_description=str(self.req),
            )
        except LegacyInstallFailure as exc:
            self.install_succeeded = False
            six.reraise(*exc.parent)
        except Exception:
            # The install may have partially happened before the exception,
            # so mark it succeeded so an uninstall is attempted on rollback.
            self.install_succeeded = True
            raise

        self.install_succeeded = success

        if success and self.legacy_install_reason == 8368:
            deprecated(
                reason=(
                    "{} was installed using the legacy 'setup.py install' "
                    "method, because a wheel could not be built for it.".
                    format(self.name)
                ),
                replacement="to fix the wheel build issue reported above",
                gone_in="21.0",
                issue=8368,
            )


def check_invalid_constraint_type(req):
    # type: (InstallRequirement) -> str
    """Return a non-empty problem description if ``req`` takes a form that is
    not allowed as a constraint (unnamed, link, or extras), emitting a
    deprecation warning; return "" if it is acceptable.
    """

    # Check for unsupported forms
    problem = ""
    if not req.name:
        problem = "Unnamed requirements are not allowed as constraints"
    elif req.link:
        problem = "Links are not allowed as constraints"
    elif req.extras:
        problem = "Constraints cannot have extras"

    if problem:
        deprecated(
            reason=(
                "Constraints are only allowed to take the form of a package "
                "name and a version specifier. Other forms were originally "
                "permitted as an accident of the implementation, but were "
                "undocumented. The new implementation of the resolver no "
                "longer supports these forms."
            ),
            replacement=(
                "replacing the constraint with a requirement."
            ),
            # No plan yet for when the new resolver becomes default
            gone_in=None,
            issue=8210
        )

    return problem