# -*- coding: utf-8 -*-

from __future__ import absolute_import, print_function
import json
import logging
import os
import sys


os.environ["PIP_PYTHON_PATH"] = str(sys.executable)


def find_site_path(pkg, site_dir=None):
    import pkg_resources
    if site_dir is None:
        site_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    working_set = pkg_resources.WorkingSet([site_dir] + sys.path[:])
    for dist in working_set:
        root = dist.location
        base_name = dist.project_name if dist.project_name else dist.key
        name = None
        if "top_level.txt" in dist.metadata_listdir(""):
            name = next(iter([l.strip() for l in dist.get_metadata_lines("top_level.txt") if l is not None]), None)
        if name is None:
            name = pkg_resources.safe_name(base_name).replace("-", "_")
        if not any(pkg == _ for _ in [base_name, name]):
            continue
        path_options = [name, "{0}.py".format(name)]
        path_options = [os.path.join(root, p) for p in path_options if p is not None]
        path = next(iter(p for p in path_options if os.path.exists(p)), None)
        if path is not None:
            return (dist, path)
    return (None, None)
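# Illustrative usage of find_site_path above (hypothetical path):
#   find_site_path("pipenv", site_dir="/usr/lib/python3.7/site-packages")
# returns a (distribution, path) tuple for the installed package, or (None, None) if it
# cannot be located on the given site directory or sys.path.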


def _patch_path(pipenv_site=None):
    import site
    pipenv_libdir = os.path.dirname(os.path.abspath(__file__))
    pipenv_site_dir = os.path.dirname(pipenv_libdir)
    pipenv_dist = None
    if pipenv_site is not None:
        pipenv_dist, pipenv_path = find_site_path("pipenv", site_dir=pipenv_site)
    else:
        pipenv_dist, pipenv_path = find_site_path("pipenv", site_dir=pipenv_site_dir)
    if pipenv_dist is not None:
        pipenv_dist.activate()
    else:
        site.addsitedir(next(iter(
            sitedir for sitedir in (pipenv_site, pipenv_site_dir)
            if sitedir is not None
        ), None))
    if pipenv_path is not None:
        pipenv_libdir = pipenv_path
    for _dir in ("vendor", "patched", pipenv_libdir):
        sys.path.insert(0, os.path.join(pipenv_libdir, _dir))
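    # Note: after this loop runs, sys.path begins with the pipenv package directory itself,
    # followed by its "patched" and "vendor" subdirectories, so the bundled dependencies are
    # importable ahead of any system-wide installations.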


def get_parser():
    from argparse import ArgumentParser
    parser = ArgumentParser("pipenv-resolver")
    parser.add_argument("--pre", action="store_true", default=False)
    parser.add_argument("--clear", action="store_true", default=False)
    parser.add_argument("--verbose", "-v", action="count", default=0)
    parser.add_argument("--dev", action="store_true", default=False)
    parser.add_argument("--debug", action="store_true", default=False)
    parser.add_argument("--system", action="store_true", default=False)
    parser.add_argument("--parse-only", action="store_true", default=False)
    parser.add_argument("--pipenv-site", metavar="pipenv_site_dir", action="store",
                        default=os.environ.get("PIPENV_SITE_DIR"))
    parser.add_argument("--requirements-dir", metavar="requirements_dir", action="store",
                        default=os.environ.get("PIPENV_REQ_DIR"))
    parser.add_argument("--write", metavar="write", action="store",
                        default=os.environ.get("PIPENV_RESOLVER_FILE"))
    parser.add_argument("packages", nargs="*")
    return parser
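# Illustrative invocation (hypothetical paths); pipenv normally runs this script in a subprocess
# with the packages to resolve passed as positional arguments:
#   python pipenv/resolver.py -v --write /tmp/resolver.json --pipenv-site /path/to/site-packages "requests>=2.20"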


def which(*args, **kwargs):
    return sys.executable


def handle_parsed_args(parsed):
    if parsed.debug:
        parsed.verbose = max(parsed.verbose, 2)
    if parsed.verbose > 1:
        logging.getLogger("notpip").setLevel(logging.DEBUG)
    elif parsed.verbose > 0:
        logging.getLogger("notpip").setLevel(logging.INFO)
    os.environ["PIPENV_VERBOSITY"] = str(parsed.verbose)
    if "PIPENV_PACKAGES" in os.environ:
        parsed.packages += os.environ.get("PIPENV_PACKAGES", "").strip().split("\n")
    return parsed


class Entry(object):
    """A resolved entry from a resolver run"""

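    # Each Entry wraps one resolved package. The incoming ``entry_dict`` is a lockfile-style
    # mapping for that package; an illustrative (hypothetical) shape:
    #   {"version": "==2.24.0", "hashes": ["sha256:..."], "markers": "python_version >= '3.6'"}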
    def __init__(self, name, entry_dict, project, resolver, reverse_deps=None, dev=False):
        super(Entry, self).__init__()
        from pipenv.vendor.requirementslib.models.utils import tomlkit_value_to_python
        self.name = name
        if isinstance(entry_dict, dict):
            self.entry_dict = self.clean_initial_dict(entry_dict)
        else:
            self.entry_dict = entry_dict
        self.project = project
        section = "develop" if dev else "default"
        pipfile_section = "dev-packages" if dev else "packages"
        self.dev = dev
        self.pipfile = tomlkit_value_to_python(
            project.parsed_pipfile.get(pipfile_section, {})
        )
        self.lockfile = project.lockfile_content.get(section, {})
        self.pipfile_dict = self.pipfile.get(self.pipfile_name, {})
        if self.dev and self.name in project.lockfile_content.get("default", {}):
            self.lockfile_dict = project.lockfile_content["default"][name]
        else:
            self.lockfile_dict = self.lockfile.get(name, entry_dict)
        self.resolver = resolver
        self.reverse_deps = reverse_deps
        self._original_markers = None
        self._markers = None
        self._entry = None
        self._lockfile_entry = None
        self._pipfile_entry = None
        self._parent_deps = []
        self._flattened_parents = []
        self._requires = None
        self._deptree = None
        self._parents_in_pipfile = []

    @staticmethod
    def make_requirement(name=None, entry=None, from_ireq=False):
        from pipenv.vendor.requirementslib.models.requirements import Requirement
        if from_ireq:
            return Requirement.from_ireq(entry)
        return Requirement.from_pipfile(name, entry)

    @classmethod
    def clean_initial_dict(cls, entry_dict):
        if not entry_dict.get("version", "").startswith("=="):
            entry_dict["version"] = cls.clean_specifier(entry_dict.get("version", ""))
        if "name" in entry_dict:
            del entry_dict["name"]
        return entry_dict
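    # Illustrative behaviour of clean_initial_dict (hypothetical input):
    #   {"name": "requests", "version": "2.24.0"}  ->  {"version": "==2.24.0"}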

    @classmethod
    def parse_pyparsing_exprs(cls, expr_iterable):
        from pipenv.vendor.pyparsing import Literal, MatchFirst
        keys = []
        expr_list = []
        expr = expr_iterable.copy()
        if isinstance(expr, Literal) or (
            expr.__class__.__name__ == Literal.__name__
        ):
            keys.append(expr.match)
        elif isinstance(expr, MatchFirst) or (
            expr.__class__.__name__ == MatchFirst.__name__
        ):
            expr_list = expr.exprs
        elif isinstance(expr, list):
            expr_list = expr
        if expr_list:
            for part in expr_list:
                keys.extend(cls.parse_pyparsing_exprs(part))
        return keys

    @classmethod
    def get_markers_from_dict(cls, entry_dict):
        from pipenv.vendor.packaging import markers as packaging_markers
        from pipenv.vendor.requirementslib.models.markers import normalize_marker_str
        marker_keys = cls.parse_pyparsing_exprs(packaging_markers.VARIABLE)
        keys_in_dict = [k for k in marker_keys if k in entry_dict]
        markers = {
            normalize_marker_str("{k} {v}".format(k=k, v=entry_dict.pop(k)))
            for k in keys_in_dict
        }
        if "markers" in entry_dict:
            markers.add(normalize_marker_str(entry_dict["markers"]))
        if None in markers:
            markers.remove(None)
        if markers:
            entry_dict["markers"] = " and ".join(list(markers))
        else:
            markers = None
        return markers, entry_dict
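    # Illustrative: an entry such as {"version": "*", "os_name": "== 'nt'"} (hypothetical) has its
    # environment-marker keys folded into a single "markers" string, e.g.
    #   {"version": "*", "markers": "os_name == 'nt'"}
    # and the set of normalized marker strings is returned alongside the rewritten dict.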

    @property
    def markers(self):
        self._markers, self.entry_dict = self.get_markers_from_dict(self.entry_dict)
        return self._markers

    @markers.setter
    def markers(self, markers):
        if markers:
            marker_str = self.marker_to_str(markers)
            if marker_str:
                self._entry = self.entry.merge_markers(marker_str)
                self._markers = self.marker_to_str(self._entry.markers)
                entry_dict = self.entry_dict.copy()
                entry_dict["markers"] = self.marker_to_str(self._entry.markers)
                self.entry_dict = entry_dict

    @property
    def original_markers(self):
        original_markers, lockfile_dict = self.get_markers_from_dict(
            self.lockfile_dict
        )
        self.lockfile_dict = lockfile_dict
        self._original_markers = self.marker_to_str(original_markers)
        return self._original_markers

    @staticmethod
    def marker_to_str(marker):
        from pipenv.vendor.requirementslib.models.markers import normalize_marker_str
        if not marker:
            return None
        from pipenv.vendor import six
        from pipenv.vendor.vistir.compat import Mapping
        marker_str = None
        if isinstance(marker, Mapping):
            # get_markers_from_dict returns a set of normalized marker strings
            marker_set, _ = Entry.get_markers_from_dict(marker)
            if marker_set:
                marker_str = " and ".join(sorted(marker_set))
        elif isinstance(marker, (list, set, tuple)):
            marker_str = " and ".join([normalize_marker_str(m) for m in marker if m])
        elif isinstance(marker, six.string_types):
            marker_str = "{0}".format(normalize_marker_str(marker))
        if isinstance(marker_str, six.string_types):
            return marker_str
        return None
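    # Illustrative: marker_to_str(["python_version >= '3.6'", "os_name == 'nt'"]) is expected to
    # return "python_version >= '3.6' and os_name == 'nt'" (assuming normalize_marker_str leaves
    # already-normalized markers unchanged).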

    def get_cleaned_dict(self, keep_outdated=False):
        if keep_outdated and self.is_updated:
            self.validate_constraints()
            self.ensure_least_updates_possible()
        elif not keep_outdated:
            self.validate_constraints()
        if self.entry.extras != self.lockfile_entry.extras:
            entry_extras = list(self.entry.extras)
            if self.lockfile_entry.extras:
                entry_extras.extend(list(self.lockfile_entry.extras))
            self._entry.req.extras = entry_extras
            self.entry_dict["extras"] = self.entry.extras
        if self.original_markers and not self.markers:
            original_markers = self.marker_to_str(self.original_markers)
            self.markers = original_markers
            self.entry_dict["markers"] = self.marker_to_str(original_markers)
        entry_hashes = set(self.entry.hashes)
        locked_hashes = set(self.lockfile_entry.hashes)
        if entry_hashes != locked_hashes and not self.is_updated:
            self.entry_dict["hashes"] = sorted(entry_hashes | locked_hashes)
        self.entry_dict["name"] = self.name
        if "version" in self.entry_dict:
            self.entry_dict["version"] = self.strip_version(self.entry_dict["version"])
        _, self.entry_dict = self.get_markers_from_dict(self.entry_dict)
        return self.entry_dict

    @property
    def lockfile_entry(self):
        if self._lockfile_entry is None:
            self._lockfile_entry = self.make_requirement(self.name, self.lockfile_dict)
        return self._lockfile_entry

    @lockfile_entry.setter
    def lockfile_entry(self, entry):
        self._lockfile_entry = entry

    @property
    def pipfile_entry(self):
        if self._pipfile_entry is None:
            self._pipfile_entry = self.make_requirement(self.pipfile_name, self.pipfile_dict)
        return self._pipfile_entry

    @property
    def entry(self):
        if self._entry is None:
            self._entry = self.make_requirement(self.name, self.entry_dict)
        return self._entry

    @property
    def normalized_name(self):
        return self.entry.normalized_name

    @property
    def pipfile_name(self):
        return self.project.get_package_name_in_pipfile(self.name, dev=self.dev)

    @property
    def is_in_pipfile(self):
        return True if self.pipfile_name else False

    @property
    def pipfile_packages(self):
        return self.project.pipfile_package_names["dev" if self.dev else "default"]
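    # Note: pipfile_name is the package key exactly as written in the Pipfile (and may be falsy
    # when the package is only an indirect dependency), while normalized_name is the canonical
    # project name used for lookups in reverse_deps and pipfile_packages.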

    def create_parent(self, name, specifier="*"):
        parent = self.create(name, specifier, self.project, self.resolver,
                             self.reverse_deps, self.dev)
        parent._deptree = self.deptree
        return parent

    @property
    def deptree(self):
        if not self._deptree:
            self._deptree = self.project.environment.get_package_requirements()
        return self._deptree

    @classmethod
    def create(cls, name, entry_dict, project, resolver, reverse_deps=None, dev=False):
        return cls(name, entry_dict, project, resolver, reverse_deps, dev)

    @staticmethod
    def clean_specifier(specifier):
        from pipenv.vendor.packaging.specifiers import Specifier
        if not any(specifier.startswith(k) for k in Specifier._operators.keys()):
            if specifier.strip().lower() in ["any", "<any>", "*"]:
                return "*"
            specifier = "=={0}".format(specifier)
        elif specifier.startswith("==") and specifier.count("=") > 3:
            specifier = "=={0}".format(specifier.lstrip("="))
        return specifier
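    # Illustrative: clean_specifier("1.0.1") -> "==1.0.1"; clean_specifier("*") -> "*";
    # a specifier that already carries an operator, e.g. ">=1.0", is returned unchanged.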

    @staticmethod
    def strip_version(specifier):
        from pipenv.vendor.packaging.specifiers import Specifier
        op = next(iter(
            k for k in Specifier._operators.keys() if specifier.startswith(k)
        ), None)
        while op:
            specifier = specifier[len(op):]
            op = next(iter(
                k for k in Specifier._operators.keys() if specifier.startswith(k)
            ), None)
        return specifier
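    # Illustrative: strip_version("==1.2.3") -> "1.2.3"; strip_version(">=2.0") -> "2.0".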

    @property
    def parent_deps(self):
        if not self._parent_deps:
            self._parent_deps = self.get_parent_deps(unnest=False)
        return self._parent_deps

    @property
    def flattened_parents(self):
        if not self._flattened_parents:
            self._flattened_parents = self.get_parent_deps(unnest=True)
        return self._flattened_parents

    @property
    def parents_in_pipfile(self):
        if not self._parents_in_pipfile:
            self._parents_in_pipfile = [
                p for p in self.flattened_parents
                if p.normalized_name in self.pipfile_packages
            ]
        return self._parents_in_pipfile

    @property
    def is_updated(self):
        return self.entry.specifiers != self.lockfile_entry.specifiers

    @property
    def requirements(self):
        if not self._requires:
            self._requires = next(iter(
                self.project.environment.get_package_requirements(self.name)
            ), {})
        return self._requires

    @property
    def updated_version(self):
        version = self.entry.specifiers
        return self.strip_version(version)

    @property
    def updated_specifier(self):
        # type: () -> str
        return self.entry.specifiers

    @property
    def original_specifier(self):
        # type: () -> str
        return self.lockfile_entry.specifiers

    @property
    def original_version(self):
        if self.original_specifier:
            return self.strip_version(self.original_specifier)
        return None

    def validate_specifiers(self):
        if self.is_in_pipfile and not self.pipfile_entry.editable:
            return self.pipfile_entry.requirement.specifier.contains(self.updated_version)
        return True

    def get_dependency(self, name):
        if self.requirements:
            return next(iter(
                dep for dep in self.requirements.get("dependencies", [])
                if dep and dep.get("package_name", "") == name
            ), {})
        return {}

    def get_parent_deps(self, unnest=False):
        from pipenv.vendor.packaging.specifiers import Specifier
        parents = []
        for spec in self.reverse_deps.get(self.normalized_name, {}).get("parents", set()):
            spec_match = next(iter(c for c in Specifier._operators if c in spec), None)
            name = spec
            parent = None
            if spec_match is not None:
                spec_index = spec.index(spec_match)
                specifier = self.clean_specifier(spec[spec_index:len(spec_match)]).strip()
                name_start = spec_index + len(spec_match)
                name = spec[name_start:].strip()
                parent = self.create_parent(name, specifier)
            else:
                name = spec
                parent = self.create_parent(name)
            if parent is not None:
                parents.append(parent)
            if not unnest or parent.pipfile_name is not None:
                continue
            if self.reverse_deps.get(parent.normalized_name, {}).get("parents", set()):
                parents.extend(parent.flattened_parents)
        return parents

    def ensure_least_updates_possible(self):
        """
        Mutate the current entry to make the fewest possible changes to the existing
        lockfile -- keep the previously locked versions of packages whenever they still
        satisfy the new constraints.

        :return: None
        """
        constraints = self.get_constraints()
        can_use_original = True
        can_use_updated = True
        satisfied_by_versions = set()
        for constraint in constraints:
            if not constraint.specifier.contains(self.original_version):
                can_use_original = False
            if not constraint.specifier.contains(self.updated_version):
                can_use_updated = False
            satisfied_by_value = getattr(constraint, "satisfied_by", None)
            if satisfied_by_value:
                satisfied_by = "{0}".format(
                    self.clean_specifier(str(satisfied_by_value.version))
                )
                satisfied_by_versions.add(satisfied_by)
        if can_use_original:
            self.entry_dict = self.lockfile_dict.copy()
        elif can_use_updated:
            if len(satisfied_by_versions) == 1:
                self.entry_dict["version"] = next(iter(
                    sat_by for sat_by in satisfied_by_versions if sat_by
                ), None)
                hashes = None
                if self.lockfile_entry.specifiers == satisfied_by:
                    ireq = self.lockfile_entry.as_ireq()
                    if not self.lockfile_entry.hashes and self.resolver._should_include_hash(ireq):
                        hashes = self.resolver.get_hash(ireq)
                    else:
                        hashes = self.lockfile_entry.hashes
                else:
                    if self.resolver._should_include_hash(constraint):
                        hashes = self.resolver.get_hash(constraint)
                if hashes:
                    self.entry_dict["hashes"] = list(hashes)
                    self._entry.hashes = frozenset(hashes)
        else:
            # Neither the old nor the new version satisfies every constraint, so check the
            # parents that depend on this package; we may need to update a top-level
            # dependency in the Pipfile to resolve the conflict.
            self.check_flattened_parents()

    def get_constraints(self):
        """
        Retrieve all of the relevant constraints for this package, aggregated from the
        resolver's parsed constraints and from the Pipfile entry where one exists.

        :return: A set of **InstallRequirement** instances representing constraints
        :rtype: Set
        """
        constraints = {
            c for c in self.resolver.parsed_constraints
            if c and c.name == self.entry.name
        }
        pipfile_constraint = self.get_pipfile_constraint()
        if pipfile_constraint and not (self.pipfile_entry.editable or pipfile_constraint.editable):
            constraints.add(pipfile_constraint)
        return constraints

    def get_pipfile_constraint(self):
        """
        Retrieve the version constraint from the Pipfile if the package is listed there;
        return None otherwise.

        :return: An **InstallRequirement** instance representing a version constraint
        """
        if self.is_in_pipfile:
            return self.pipfile_entry.as_ireq()

    def validate_constraints(self):
        """
        Retrieve the full set of available constraints and iterate over them, validating
        that the pinned version satisfies each one without causing unresolvable conflicts.

        :return: True if the constraints are satisfied by the resolution provided
        :raises: :exc:`pipenv.exceptions.DependencyConflict` if a constraint cannot be satisfied
        """
        from pipenv.exceptions import DependencyConflict
        from pipenv.environments import is_verbose

        constraints = self.get_constraints()
        pinned_version = self.updated_version
        for constraint in constraints:
            if not constraint.req:
                continue
            if pinned_version and not constraint.req.specifier.contains(
                str(pinned_version), prereleases=True
            ):
                if is_verbose():
                    print("Tried constraint: {0!r}".format(constraint), file=sys.stderr)
                msg = (
                    "Cannot resolve conflicting version {0}{1} while {2}{3} is "
                    "locked.".format(
                        self.name, constraint.req.specifier,
                        self.name, self.updated_specifier
                    )
                )
                raise DependencyConflict(msg)
        return True

    def check_flattened_parents(self):
        for parent in self.parents_in_pipfile:
            if not parent.updated_specifier:
                continue
            if not parent.validate_specifiers():
                from pipenv.exceptions import DependencyConflict
                msg = (
                    "Cannot resolve conflicting versions: (Root: {0}) {1}{2} (Pipfile) "
                    "Incompatible with {3}{4} (resolved)\n".format(
                        self.name, parent.pipfile_name,
                        parent.pipfile_entry.requirement.specifiers, parent.name,
                        parent.updated_specifier
                    )
                )
                raise DependencyConflict(msg)

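    # Attribute dispatch below: names beginning with "is_", "has_" or "new_" are resolved against
    # the freshly resolved requirement (self.entry), while names beginning with "was_", "had_" or
    # "old_" are resolved against the previously locked requirement (self.lockfile_entry). For
    # example (illustrative), ``entry.is_editable`` reads ``entry.entry.editable`` and
    # ``entry.was_editable`` reads ``entry.lockfile_entry.editable``.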
    def __getattribute__(self, key):
        result = None
        old_version = ["was_", "had_", "old_"]
        new_version = ["is_", "has_", "new_"]
        if any(key.startswith(v) for v in new_version):
            entry = Entry.__getattribute__(self, "entry")
            try:
                keystart = key.index("_") + 1
                try:
                    result = getattr(entry, key[keystart:])
                except AttributeError:
                    result = getattr(entry, key)
            except AttributeError:
                result = super(Entry, self).__getattribute__(key)
            return result
        if any(key.startswith(v) for v in old_version):
            lockfile_entry = Entry.__getattribute__(self, "lockfile_entry")
            try:
                keystart = key.index("_") + 1
                try:
                    result = getattr(lockfile_entry, key[keystart:])
                except AttributeError:
                    result = getattr(lockfile_entry, key)
            except AttributeError:
                result = super(Entry, self).__getattribute__(key)
            return result
        return super(Entry, self).__getattribute__(key)


def clean_results(results, resolver, project, dev=False):
    from pipenv.utils import translate_markers
    if not project.lockfile_exists:
        return results
    lockfile = project.lockfile_content
    section = "develop" if dev else "default"
    reverse_deps = project.environment.reverse_dependencies()
    new_results = [r for r in results if r["name"] not in lockfile[section]]
    for result in results:
        name = result.get("name")
        entry_dict = result.copy()
        entry = Entry(name, entry_dict, project, resolver, reverse_deps=reverse_deps, dev=dev)
        entry_dict = translate_markers(entry.get_cleaned_dict(keep_outdated=False))
        new_results.append(entry_dict)
    return new_results


def clean_outdated(results, resolver, project, dev=False):
    if not project.lockfile_exists:
        return results
    lockfile = project.lockfile_content
    section = "develop" if dev else "default"
    reverse_deps = project.environment.reverse_dependencies()
    new_results = [r for r in results if r["name"] not in lockfile[section]]
    for result in results:
        name = result.get("name")
        entry_dict = result.copy()
        entry = Entry(name, entry_dict, project, resolver, reverse_deps=reverse_deps, dev=dev)
        # The old entry was editable but this one isn't; prefer the old one
        # TODO: Should this be the case for all locking?
        if entry.was_editable and not entry.is_editable:
            continue
        lockfile_entry = lockfile[section].get(name, None)
        if not lockfile_entry:
            alternate_section = "develop" if not dev else "default"
            if name in lockfile[alternate_section]:
                lockfile_entry = lockfile[alternate_section][name]
        if lockfile_entry and not entry.is_updated:
            old_markers = next(iter(m for m in (
                entry.lockfile_entry.markers, lockfile_entry.get("markers", None)
            ) if m is not None), None)
            new_markers = entry_dict.get("markers", None)
            if old_markers:
                old_markers = Entry.marker_to_str(old_markers)
            if old_markers and not new_markers:
                entry.markers = old_markers
            elif new_markers and not old_markers:
                del entry.entry_dict["markers"]
                entry._entry.req.req.marker = None
                entry._entry.markers = None
            # if the entry has not changed versions since the previous lock,
            # don't introduce new markers since that is more restrictive
            # if entry.has_markers and not entry.had_markers and not entry.is_updated:
            # do make sure we retain the original markers for entries that are not changed
        entry_dict = entry.get_cleaned_dict(keep_outdated=True)
        new_results.append(entry_dict)
    return new_results


def parse_packages(packages, pre, clear, system, requirements_dir=None):
    from pipenv.vendor.requirementslib.models.requirements import Requirement
    from pipenv.vendor.vistir.contextmanagers import cd, temp_path
    from pipenv.utils import parse_indexes
    parsed_packages = []
    for package in packages:
        indexes, trusted_hosts, line = parse_indexes(package)
        line = " ".join(line)
        pf = dict()
        req = Requirement.from_line(line)
        if not req.name:
            with temp_path(), cd(req.req.setup_info.base_dir):
                sys.path.insert(0, req.req.setup_info.base_dir)
                req.req._setup_info.get_info()
                req.update_name_from_path(req.req.setup_info.base_dir)
        try:
            name, entry = req.pipfile_entry
        except Exception:
            continue
        else:
            if name is not None and entry is not None:
                pf[name] = entry
                parsed_packages.append(pf)
    print("RESULTS:")
    if parsed_packages:
        print(json.dumps(parsed_packages))
    else:
        print(json.dumps([]))
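# parse_packages prints a "RESULTS:" header followed by a JSON list mapping each package name to
# its Pipfile-style entry, e.g. (illustrative): [{"requests": {"version": "*"}}]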


def resolve_packages(pre, clear, verbose, system, write, requirements_dir, packages, dev):
    from pipenv.utils import create_mirror_source, resolve_deps, replace_pypi_sources
    pypi_mirror_source = (
        create_mirror_source(os.environ["PIPENV_PYPI_MIRROR"])
        if "PIPENV_PYPI_MIRROR" in os.environ
        else None
    )

    def resolve(packages, pre, project, sources, clear, system, requirements_dir=None):
        from pipenv.patched.piptools import logging as piptools_logging
        piptools_logging.log.verbosity = 1 if verbose else 0
        return resolve_deps(
            packages,
            which,
            project=project,
            pre=pre,
            sources=sources,
            clear=clear,
            allow_global=system,
            req_dir=requirements_dir
        )

    from pipenv.core import project
    sources = (
        replace_pypi_sources(project.pipfile_sources, pypi_mirror_source)
        if pypi_mirror_source
        else project.pipfile_sources
    )
    keep_outdated = os.environ.get("PIPENV_KEEP_OUTDATED", False)
    results, resolver = resolve(
        packages,
        pre=pre,
        project=project,
        sources=sources,
        clear=clear,
        system=system,
        requirements_dir=requirements_dir,
    )
    if keep_outdated:
        results = clean_outdated(results, resolver, project, dev)
    else:
        results = clean_results(results, resolver, project, dev)
    if write:
        with open(write, "w") as fh:
            if not results:
                json.dump([], fh)
            else:
                json.dump(results, fh)
    else:
        print("RESULTS:")
        if results:
            print(json.dumps(results))
        else:
            print(json.dumps([]))


def _main(pre, clear, verbose, system, write, requirements_dir, packages, parse_only=False, dev=False):
    os.environ["PIPENV_REQUESTED_PYTHON_VERSION"] = ".".join([str(s) for s in sys.version_info[:3]])
    os.environ["PIP_PYTHON_PATH"] = str(sys.executable)
    if parse_only:
        parse_packages(
            packages,
            pre=pre,
            clear=clear,
            system=system,
            requirements_dir=requirements_dir,
        )
    else:
        resolve_packages(pre, clear, verbose, system, write, requirements_dir, packages, dev)


def main():
    parser = get_parser()
    parsed, remaining = parser.parse_known_args()
    _patch_path(pipenv_site=parsed.pipenv_site)
    import warnings
    from pipenv.vendor.vistir.compat import ResourceWarning
    from pipenv.vendor.vistir.misc import replace_with_text_stream
    warnings.simplefilter("ignore", category=ResourceWarning)
    replace_with_text_stream("stdout")
    replace_with_text_stream("stderr")
    os.environ["PIP_DISABLE_PIP_VERSION_CHECK"] = str("1")
    os.environ["PYTHONIOENCODING"] = str("utf-8")
    os.environ["PYTHONUNBUFFERED"] = str("1")
    parsed = handle_parsed_args(parsed)
    _main(parsed.pre, parsed.clear, parsed.verbose, parsed.system, parsed.write,
          parsed.requirements_dir, parsed.packages, parse_only=parsed.parse_only,
          dev=parsed.dev)


if __name__ == "__main__":
    main()