# The following comment should be removed at some point in the future.
# mypy: strict-optional=False

import logging
import os
import shutil
import sys
import uuid
import zipfile
from typing import Any, Dict, Iterable, List, Optional, Sequence, Union

from pip._vendor import pkg_resources, six
from pip._vendor.packaging.markers import Marker
from pip._vendor.packaging.requirements import Requirement
from pip._vendor.packaging.specifiers import SpecifierSet
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.packaging.version import Version
from pip._vendor.packaging.version import parse as parse_version
from pip._vendor.pep517.wrappers import Pep517HookCaller
from pip._vendor.pkg_resources import Distribution

from pip._internal.build_env import BuildEnvironment, NoOpBuildEnvironment
from pip._internal.exceptions import InstallationError
from pip._internal.locations import get_scheme
from pip._internal.models.link import Link
from pip._internal.operations.build.metadata import generate_metadata
from pip._internal.operations.build.metadata_legacy import (
    generate_metadata as generate_metadata_legacy,
)
from pip._internal.operations.install.editable_legacy import (
    install_editable as install_editable_legacy,
)
from pip._internal.operations.install.legacy import LegacyInstallFailure
from pip._internal.operations.install.legacy import install as install_legacy
from pip._internal.operations.install.wheel import install_wheel
from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path
from pip._internal.req.req_uninstall import UninstallPathSet
from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.direct_url_helpers import direct_url_from_link
from pip._internal.utils.hashes import Hashes
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import (
    ask_path_exists,
    backup_dir,
    display_path,
    dist_in_site_packages,
    dist_in_usersite,
    get_distribution,
    hide_url,
    redact_auth_from_url,
)
from pip._internal.utils.packaging import get_metadata
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
from pip._internal.utils.virtualenv import running_under_virtualenv
from pip._internal.vcs import vcs

logger = logging.getLogger(__name__)


def _get_dist(metadata_directory: str) -> Distribution:
    """Return a pkg_resources.Distribution for the provided
    metadata directory.
    """
    dist_dir = metadata_directory.rstrip(os.sep)

    # Build a PathMetadata object, from path to metadata. :wink:
    base_dir, dist_dir_name = os.path.split(dist_dir)
    metadata = pkg_resources.PathMetadata(base_dir, dist_dir)

    # Determine the correct Distribution object type.
    if dist_dir.endswith(".egg-info"):
        dist_cls = pkg_resources.Distribution
        dist_name = os.path.splitext(dist_dir_name)[0]
    else:
        assert dist_dir.endswith(".dist-info")
        dist_cls = pkg_resources.DistInfoDistribution
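        # e.g. "foo-1.0.dist-info" yields the project name "foo".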
        dist_name = os.path.splitext(dist_dir_name)[0].split("-")[0]

    return dist_cls(
        base_dir,
        project_name=dist_name,
        metadata=metadata,
    )


class InstallRequirement:
    """
    Represents something that may be installed later on, may have information
    about where to fetch the relevant requirement, and also contains logic for
    installing the requirement.
    """

    def __init__(
        self,
        req: Optional[Requirement],
        comes_from: Optional[Union[str, "InstallRequirement"]],
        editable: bool = False,
        link: Optional[Link] = None,
        markers: Optional[Marker] = None,
        use_pep517: Optional[bool] = None,
        isolated: bool = False,
        install_options: Optional[List[str]] = None,
        global_options: Optional[List[str]] = None,
        hash_options: Optional[Dict[str, List[str]]] = None,
        constraint: bool = False,
        extras: Iterable[str] = (),
        user_supplied: bool = False,
    ) -> None:
        assert req is None or isinstance(req, Requirement), req
        self.req = req
        self.comes_from = comes_from
        self.constraint = constraint
        self.editable = editable
        self.legacy_install_reason: Optional[int] = None

        # source_dir is the local directory where the linked requirement is
        # located, or unpacked. In case unpacking is needed, creating and
        # populating source_dir is done by the RequirementPreparer. Note this
        # is not necessarily the directory where pyproject.toml or setup.py is
        # located - that one is obtained via unpacked_source_directory.
        self.source_dir: Optional[str] = None
        if self.editable:
            assert link
            if link.is_file:
                self.source_dir = os.path.normpath(
                    os.path.abspath(link.file_path)
                )

        if link is None and req and req.url:
            # PEP 508 URL requirement
            link = Link(req.url)
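        # Keep the link exactly as supplied in original_link; self.link may be
        # replaced later (for example when a cached wheel is found).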
        self.link = self.original_link = link
        self.original_link_is_in_wheel_cache = False

        # Path to any downloaded or already-existing package.
        self.local_file_path: Optional[str] = None
        if self.link and self.link.is_file:
            self.local_file_path = self.link.file_path

        if extras:
            self.extras = extras
        elif req:
            self.extras = {
                pkg_resources.safe_extra(extra) for extra in req.extras
            }
        else:
            self.extras = set()
        if markers is None and req:
            markers = req.marker
        self.markers = markers

        # This holds the pkg_resources.Distribution object if this requirement
        # is already available:
        self.satisfied_by: Optional[Distribution] = None
        # Whether the installation process should try to uninstall an existing
        # distribution before installing this requirement.
        self.should_reinstall = False
        # Temporary build location
        self._temp_build_dir: Optional[TempDirectory] = None
        # Set to True after successful installation
        self.install_succeeded: Optional[bool] = None
        # Supplied options
        self.install_options = install_options if install_options else []
        self.global_options = global_options if global_options else []
        self.hash_options = hash_options if hash_options else {}
        # Set to True after successful preparation of this requirement
        self.prepared = False
        # User supplied requirements are explicitly requested for installation
        # by the user via CLI arguments or requirements files, as opposed to,
        # e.g., dependencies, extras or constraints.
        self.user_supplied = user_supplied

        self.isolated = isolated
        self.build_env: BuildEnvironment = NoOpBuildEnvironment()

        # For PEP 517, the directory where we request the project metadata
        # gets stored. We need this to pass to build_wheel, so the backend
        # can ensure that the wheel matches the metadata (see the PEP for
        # details).
        self.metadata_directory: Optional[str] = None

        # The static build requirements (from pyproject.toml)
        self.pyproject_requires: Optional[List[str]] = None

        # Build requirements that we will check are available
        self.requirements_to_check: List[str] = []

        # The PEP 517 backend we should use to build the project
        self.pep517_backend: Optional[Pep517HookCaller] = None

        # Are we using PEP 517 for this requirement?
        # After pyproject.toml has been loaded, the only valid values are True
        # and False. Before loading, None is valid (meaning "use the default").
        # Setting an explicit value before loading pyproject.toml is supported,
        # but after loading this flag should be treated as read only.
        self.use_pep517 = use_pep517

        # This requirement needs more preparation before it can be built
        self.needs_more_preparation = False

    def __str__(self) -> str:
        if self.req:
            s = str(self.req)
            if self.link:
                s += ' from {}'.format(redact_auth_from_url(self.link.url))
        elif self.link:
            s = redact_auth_from_url(self.link.url)
        else:
            s = '<InstallRequirement>'
        if self.satisfied_by is not None:
            s += ' in {}'.format(display_path(self.satisfied_by.location))
        if self.comes_from:
            if isinstance(self.comes_from, str):
                comes_from: Optional[str] = self.comes_from
            else:
                comes_from = self.comes_from.from_path()
            if comes_from:
                s += f' (from {comes_from})'
        return s

    def __repr__(self) -> str:
        return '<{} object: {} editable={!r}>'.format(
            self.__class__.__name__, str(self), self.editable)

    def format_debug(self) -> str:
        """An untested helper for getting state, for debugging.
        """
        attributes = vars(self)
        names = sorted(attributes)

        state = (
            "{}={!r}".format(attr, attributes[attr]) for attr in names
        )
        return '<{name} object: {{{state}}}>'.format(
            name=self.__class__.__name__,
            state=", ".join(state),
        )

    # Things that are valid for all kinds of requirements?
    @property
    def name(self) -> Optional[str]:
        if self.req is None:
            return None
        return pkg_resources.safe_name(self.req.name)

    @property
    def specifier(self) -> SpecifierSet:
        return self.req.specifier

    @property
    def is_pinned(self) -> bool:
        """Return whether I am pinned to an exact version.

        For example, some-package==1.2 is pinned; some-package>1.2 is not.
        """
        specifiers = self.specifier
        return (len(specifiers) == 1 and
                next(iter(specifiers)).operator in {'==', '==='})

    def match_markers(self, extras_requested: Optional[Iterable[str]] = None) -> bool:
        if not extras_requested:
            # Provide an extra to safely evaluate the markers
            # without matching any extra
            extras_requested = ('',)
        if self.markers is not None:
            return any(
                self.markers.evaluate({'extra': extra})
                for extra in extras_requested)
        else:
            return True

    @property
    def has_hash_options(self) -> bool:
        """Return whether any known-good hashes are specified as options.

        These activate --require-hashes mode; hashes specified as part of a
        URL do not.

        """
        return bool(self.hash_options)

    def hashes(self, trust_internet: bool = True) -> Hashes:
        """Return a hash-comparer that considers my option- and URL-based
        hashes to be known-good.

        Hashes in URLs--ones embedded in the requirements file, not ones
        downloaded from an index server--are almost peers with ones from
        flags. They satisfy --require-hashes (whether it was implicitly or
        explicitly activated) but do not activate it. md5 and sha224 are not
        allowed in flags, which should nudge people toward good algos. We
        always OR all hashes together, even ones from URLs.

        :param trust_internet: Whether to trust URL-based (#md5=...) hashes
            downloaded from the internet, as by populate_link()

        """
        good_hashes = self.hash_options.copy()
        link = self.link if trust_internet else self.original_link
        if link and link.hash:
            good_hashes.setdefault(link.hash_name, []).append(link.hash)
        return Hashes(good_hashes)

    def from_path(self) -> Optional[str]:
        """Format a nice indicator to show where this "comes from"
        """
        if self.req is None:
            return None
        s = str(self.req)
        if self.comes_from:
            if isinstance(self.comes_from, str):
                comes_from = self.comes_from
            else:
                comes_from = self.comes_from.from_path()
            if comes_from:
                s += '->' + comes_from
        return s

    def ensure_build_location(
        self, build_dir: str, autodelete: bool, parallel_builds: bool
    ) -> str:
        assert build_dir is not None
        if self._temp_build_dir is not None:
            assert self._temp_build_dir.path
            return self._temp_build_dir.path
        if self.req is None:
            # Some systems have /tmp as a symlink which confuses custom
            # builds (such as numpy). Thus, we ensure that the real path
            # is returned.
            self._temp_build_dir = TempDirectory(
                kind=tempdir_kinds.REQ_BUILD, globally_managed=True
            )

            return self._temp_build_dir.path

        # This is the only remaining place where we manually determine the path
        # for the temporary directory. It is only needed for editables where
        # it is the value of the --src option.

        # When parallel builds are enabled, add a UUID to the build directory
        # name so multiple builds do not interfere with each other.
        dir_name: str = canonicalize_name(self.name)
        if parallel_builds:
            dir_name = f"{dir_name}_{uuid.uuid4().hex}"

        # FIXME: Is there a better place to create the build_dir? (hg and bzr
        # need this)
        if not os.path.exists(build_dir):
            logger.debug('Creating directory %s', build_dir)
            os.makedirs(build_dir)
        actual_build_dir = os.path.join(build_dir, dir_name)
        # `None` indicates that we respect the globally-configured deletion
        # settings, which is what we actually want when auto-deleting.
        delete_arg = None if autodelete else False
        return TempDirectory(
            path=actual_build_dir,
            delete=delete_arg,
            kind=tempdir_kinds.REQ_BUILD,
            globally_managed=True,
        ).path

    def _set_requirement(self) -> None:
        """Set requirement after generating metadata.
        """
        assert self.req is None
        assert self.metadata is not None
        assert self.source_dir is not None

        # Construct a Requirement object from the generated metadata
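        # "==" is used for PEP 440-compliant versions; otherwise fall back to
        # the arbitrary-equality operator "===" so the version string is
        # matched verbatim.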
        if isinstance(parse_version(self.metadata["Version"]), Version):
            op = "=="
        else:
            op = "==="

        self.req = Requirement(
            "".join([
                self.metadata["Name"],
                op,
                self.metadata["Version"],
            ])
        )

    def warn_on_mismatching_name(self) -> None:
        metadata_name = canonicalize_name(self.metadata["Name"])
        if canonicalize_name(self.req.name) == metadata_name:
            # Everything is fine.
            return

        # If we're here, there's a mismatch. Log a warning about it.
        logger.warning(
            'Generating metadata for package %s '
            'produced metadata for project name %s. Fix your '
            '#egg=%s fragments.',
            self.name, metadata_name, self.name
        )
        self.req = Requirement(metadata_name)

    def check_if_exists(self, use_user_site: bool) -> None:
        """Find an installed distribution that satisfies or conflicts
        with this requirement, and set self.satisfied_by or
        self.should_reinstall appropriately.
        """
        if self.req is None:
            return
        existing_dist = get_distribution(self.req.name)
        if not existing_dist:
            return

        # pkg_resources may contain a different copy of packaging.version than
        # pip if the downstream distributor does a poor job debundling pip.
        # We avoid existing_dist.parsed_version and let SpecifierSet.contains
        # parse the version instead.
        existing_version = existing_dist.version
        version_compatible = (
            existing_version is not None and
            self.req.specifier.contains(existing_version, prereleases=True)
        )
        if not version_compatible:
            self.satisfied_by = None
            if use_user_site:
                if dist_in_usersite(existing_dist):
                    self.should_reinstall = True
                elif (running_under_virtualenv() and
                        dist_in_site_packages(existing_dist)):
                    raise InstallationError(
                        "Will not install to the user site because it will "
                        "lack sys.path precedence to {} in {}".format(
                            existing_dist.project_name, existing_dist.location)
                    )
            else:
                self.should_reinstall = True
        else:
            if self.editable:
                self.should_reinstall = True
                # when installing editables, nothing pre-existing should ever
                # satisfy
                self.satisfied_by = None
            else:
                self.satisfied_by = existing_dist

    # Things valid for wheels
    @property
    def is_wheel(self) -> bool:
        if not self.link:
            return False
        return self.link.is_wheel

    # Things valid for sdists
    @property
    def unpacked_source_directory(self) -> str:
        return os.path.join(
            self.source_dir,
            self.link and self.link.subdirectory_fragment or '')

    @property
    def setup_py_path(self) -> str:
        assert self.source_dir, f"No source dir for {self}"
        setup_py = os.path.join(self.unpacked_source_directory, 'setup.py')

        return setup_py

    @property
    def pyproject_toml_path(self) -> str:
        assert self.source_dir, f"No source dir for {self}"
        return make_pyproject_path(self.unpacked_source_directory)

    def load_pyproject_toml(self) -> None:
        """Load the pyproject.toml file.

        After calling this routine, all of the attributes related to PEP 517
        processing for this requirement have been set. In particular, the
        use_pep517 attribute can be used to determine whether we should
        follow the PEP 517 or legacy (setup.py) code path.
        """
        pyproject_toml_data = load_pyproject_toml(
            self.use_pep517,
            self.pyproject_toml_path,
            self.setup_py_path,
            str(self)
        )

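        # A None result means PEP 517 processing does not apply and the
        # legacy setup.py-based code path should be used.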
        if pyproject_toml_data is None:
            self.use_pep517 = False
            return

        self.use_pep517 = True
        requires, backend, check, backend_path = pyproject_toml_data
        self.requirements_to_check = check
        self.pyproject_requires = requires
        self.pep517_backend = Pep517HookCaller(
            self.unpacked_source_directory, backend, backend_path=backend_path,
        )

    def _generate_metadata(self) -> str:
        """Invokes metadata generator functions with the required arguments.
        """
        if not self.use_pep517:
            assert self.unpacked_source_directory

            if not os.path.exists(self.setup_py_path):
                raise InstallationError(
                    f'File "setup.py" not found for legacy project {self}.'
                )

            return generate_metadata_legacy(
                build_env=self.build_env,
                setup_py_path=self.setup_py_path,
                source_dir=self.unpacked_source_directory,
                isolated=self.isolated,
                details=self.name or f"from {self.link}"
            )

        assert self.pep517_backend is not None

        return generate_metadata(
            build_env=self.build_env,
            backend=self.pep517_backend,
        )

    def prepare_metadata(self) -> None:
        """Ensure that project metadata is available.

        Under PEP 517, call the backend hook to prepare the metadata.
        Under legacy processing, call setup.py egg-info.
        """
        assert self.source_dir

        with indent_log():
            self.metadata_directory = self._generate_metadata()

        # Act on the newly generated metadata, based on the name and version.
        if not self.name:
            self._set_requirement()
        else:
            self.warn_on_mismatching_name()

        self.assert_source_matches_version()

    @property
    def metadata(self) -> Any:
        if not hasattr(self, '_metadata'):
            self._metadata = get_metadata(self.get_dist())

        return self._metadata

    def get_dist(self) -> Distribution:
        return _get_dist(self.metadata_directory)

    def assert_source_matches_version(self) -> None:
        assert self.source_dir
        version = self.metadata['version']
        if self.req.specifier and version not in self.req.specifier:
            logger.warning(
                'Requested %s, but installing version %s',
                self,
                version,
            )
        else:
            logger.debug(
                'Source in %s has version %s, which satisfies requirement %s',
                display_path(self.source_dir),
                version,
                self,
            )

    # For both source distributions and editables
    def ensure_has_source_dir(
        self,
        parent_dir: str,
        autodelete: bool = False,
        parallel_builds: bool = False,
    ) -> None:
        """Ensure that a source_dir is set.

        This will create a temporary build dir if the name of the requirement
        isn't known yet.

        :param parent_dir: The ideal pip parent_dir for the source_dir.
            Generally src_dir for editables and build_dir for sdists.
        :return: self.source_dir
        """
        if self.source_dir is None:
            self.source_dir = self.ensure_build_location(
                parent_dir,
                autodelete=autodelete,
                parallel_builds=parallel_builds,
            )

    # For editable installations
    def update_editable(self) -> None:
        if not self.link:
            logger.debug(
                "Cannot update repository at %s; repository location is "
                "unknown",
                self.source_dir,
            )
            return
        assert self.editable
        assert self.source_dir
        if self.link.scheme == 'file':
            # Static paths don't get updated
            return
        vcs_backend = vcs.get_backend_for_scheme(self.link.scheme)
        # Editable requirements are validated in Requirement constructors.
        # So here, if it's neither a path nor a valid VCS URL, it's a bug.
        assert vcs_backend, f"Unsupported VCS URL {self.link.url}"
        hidden_url = hide_url(self.link.url)
        vcs_backend.obtain(self.source_dir, url=hidden_url)

    # Top-level Actions
    def uninstall(
        self, auto_confirm: bool = False, verbose: bool = False
    ) -> Optional[UninstallPathSet]:
        """
        Uninstall the distribution currently satisfying this requirement.

        Prompts before removing or modifying files unless
        ``auto_confirm`` is True.

        Refuses to delete or modify files outside of ``sys.prefix`` -
        thus uninstallation within a virtual environment can only
        modify that virtual environment, even if the virtualenv is
        linked to global site-packages.

        """
        assert self.req
        dist = get_distribution(self.req.name)
        if not dist:
            logger.warning("Skipping %s as it is not installed.", self.name)
            return None
        logger.info('Found existing installation: %s', dist)

        uninstalled_pathset = UninstallPathSet.from_dist(dist)
        uninstalled_pathset.remove(auto_confirm, verbose)
        return uninstalled_pathset

    def _get_archive_name(self, path: str, parentdir: str, rootdir: str) -> str:
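        # Map an on-disk path to the member name it should have inside the
        # archive: strip the rootdir prefix, switch to forward slashes, and
        # place everything under a top-level directory named after the project.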

        def _clean_zip_name(name: str, prefix: str) -> str:
            assert name.startswith(prefix + os.path.sep), (
                f"name {name!r} doesn't start with prefix {prefix!r}"
            )
            name = name[len(prefix) + 1:]
            name = name.replace(os.path.sep, '/')
            return name

        path = os.path.join(parentdir, path)
        name = _clean_zip_name(path, rootdir)
        return self.name + '/' + name

    def archive(self, build_dir: Optional[str]) -> None:
        """Saves archive to provided build_dir.

        Used for saving downloaded VCS requirements as part of `pip download`.
        """
        assert self.source_dir
        if build_dir is None:
            return

        create_archive = True
        archive_name = '{}-{}.zip'.format(self.name, self.metadata["version"])
        archive_path = os.path.join(build_dir, archive_name)

        if os.path.exists(archive_path):
            response = ask_path_exists(
                'The file {} exists. (i)gnore, (w)ipe, '
                '(b)ackup, (a)bort '.format(
                    display_path(archive_path)),
                ('i', 'w', 'b', 'a'))
            if response == 'i':
                create_archive = False
            elif response == 'w':
                logger.warning('Deleting %s', display_path(archive_path))
                os.remove(archive_path)
            elif response == 'b':
                dest_file = backup_dir(archive_path)
                logger.warning(
                    'Backing up %s to %s',
                    display_path(archive_path),
                    display_path(dest_file),
                )
                shutil.move(archive_path, dest_file)
            elif response == 'a':
                sys.exit(-1)

        if not create_archive:
            return

        zip_output = zipfile.ZipFile(
            archive_path, 'w', zipfile.ZIP_DEFLATED, allowZip64=True,
        )
        with zip_output:
            dir = os.path.normcase(
                os.path.abspath(self.unpacked_source_directory)
            )
            for dirpath, dirnames, filenames in os.walk(dir):
                for dirname in dirnames:
                    dir_arcname = self._get_archive_name(
                        dirname, parentdir=dirpath, rootdir=dir,
                    )
                    zipdir = zipfile.ZipInfo(dir_arcname + '/')
                    zipdir.external_attr = 0x1ED << 16  # 0o755
                    zip_output.writestr(zipdir, '')
                for filename in filenames:
                    file_arcname = self._get_archive_name(
                        filename, parentdir=dirpath, rootdir=dir,
                    )
                    filename = os.path.join(dirpath, filename)
                    zip_output.write(filename, file_arcname)

        logger.info('Saved %s', display_path(archive_path))

    def install(
        self,
        install_options: List[str],
        global_options: Optional[Sequence[str]] = None,
        root: Optional[str] = None,
        home: Optional[str] = None,
        prefix: Optional[str] = None,
        warn_script_location: bool = True,
        use_user_site: bool = False,
        pycompile: bool = True
    ) -> None:
        scheme = get_scheme(
            self.name,
            user=use_user_site,
            home=home,
            root=root,
            isolated=self.isolated,
            prefix=prefix,
        )

        global_options = global_options if global_options is not None else []
        if self.editable:
            install_editable_legacy(
                install_options,
                global_options,
                prefix=prefix,
                home=home,
                use_user_site=use_user_site,
                name=self.name,
                setup_py_path=self.setup_py_path,
                isolated=self.isolated,
                build_env=self.build_env,
                unpacked_source_directory=self.unpacked_source_directory,
            )
            self.install_succeeded = True
            return

        if self.is_wheel:
            assert self.local_file_path
            direct_url = None
            if self.original_link:
                direct_url = direct_url_from_link(
                    self.original_link,
                    self.source_dir,
                    self.original_link_is_in_wheel_cache,
                )
            install_wheel(
                self.name,
                self.local_file_path,
                scheme=scheme,
                req_description=str(self.req),
                pycompile=pycompile,
                warn_script_location=warn_script_location,
                direct_url=direct_url,
                requested=self.user_supplied,
            )
            self.install_succeeded = True
            return

        # TODO: Why don't we do this for editable installs?

        # Extend the list of global and install options passed on to
        # the setup.py call with the ones from the requirements file.
        # Options specified in requirements file override those
        # specified on the command line, since the last option given
        # to setup.py is the one that is used.
        global_options = list(global_options) + self.global_options
        install_options = list(install_options) + self.install_options

        try:
            success = install_legacy(
                install_options=install_options,
                global_options=global_options,
                root=root,
                home=home,
                prefix=prefix,
                use_user_site=use_user_site,
                pycompile=pycompile,
                scheme=scheme,
                setup_py_path=self.setup_py_path,
                isolated=self.isolated,
                req_name=self.name,
                build_env=self.build_env,
                unpacked_source_directory=self.unpacked_source_directory,
                req_description=str(self.req),
            )
        except LegacyInstallFailure as exc:
            self.install_succeeded = False
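            # exc.parent is the original (type, value, traceback); re-raise it
            # so the underlying failure propagates unchanged to the caller.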
            six.reraise(*exc.parent)
        except Exception:
            self.install_succeeded = True
            raise

        self.install_succeeded = success

        if success and self.legacy_install_reason == 8368:
            deprecated(
                reason=(
                    "{} was installed using the legacy 'setup.py install' "
                    "method, because a wheel could not be built for it.".
                    format(self.name)
                ),
                replacement="to fix the wheel build issue reported above",
                gone_in=None,
                issue=8368,
            )


def check_invalid_constraint_type(req: InstallRequirement) -> str:

    # Check for unsupported forms
    problem = ""
    if not req.name:
        problem = "Unnamed requirements are not allowed as constraints"
    elif req.editable:
        problem = "Editable requirements are not allowed as constraints"
    elif req.extras:
        problem = "Constraints cannot have extras"

    if problem:
        deprecated(
            reason=(
                "Constraints are only allowed to take the form of a package "
                "name and a version specifier. Other forms were originally "
                "permitted as an accident of the implementation, but were "
                "undocumented. The new implementation of the resolver no "
                "longer supports these forms."
            ),
            replacement="replacing the constraint with a requirement",
            # No plan yet for when the new resolver becomes default
            gone_in=None,
            issue=8210,
        )

    return problem