tests attempt 2

2000-Trek 2023-07-28 23:30:45 +02:00
parent fdf385fe06
commit c88f7df83a
2363 changed files with 408191 additions and 0 deletions


@@ -0,0 +1,141 @@
from typing import FrozenSet, Iterable, Optional, Tuple, Union
from pip._vendor.packaging.specifiers import SpecifierSet
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
from pip._vendor.packaging.version import LegacyVersion, Version
from pip._internal.models.link import Link, links_equivalent
from pip._internal.req.req_install import InstallRequirement
from pip._internal.utils.hashes import Hashes
CandidateLookup = Tuple[Optional["Candidate"], Optional[InstallRequirement]]
CandidateVersion = Union[LegacyVersion, Version]
def format_name(project: str, extras: FrozenSet[str]) -> str:
if not extras:
return project
canonical_extras = sorted(canonicalize_name(e) for e in extras)
return "{}[{}]".format(project, ",".join(canonical_extras))
class Constraint:
def __init__(
self, specifier: SpecifierSet, hashes: Hashes, links: FrozenSet[Link]
) -> None:
self.specifier = specifier
self.hashes = hashes
self.links = links
@classmethod
def empty(cls) -> "Constraint":
return Constraint(SpecifierSet(), Hashes(), frozenset())
@classmethod
def from_ireq(cls, ireq: InstallRequirement) -> "Constraint":
links = frozenset([ireq.link]) if ireq.link else frozenset()
return Constraint(ireq.specifier, ireq.hashes(trust_internet=False), links)
def __bool__(self) -> bool:
return bool(self.specifier) or bool(self.hashes) or bool(self.links)
def __and__(self, other: InstallRequirement) -> "Constraint":
if not isinstance(other, InstallRequirement):
return NotImplemented
specifier = self.specifier & other.specifier
hashes = self.hashes & other.hashes(trust_internet=False)
links = self.links
if other.link:
links = links.union([other.link])
return Constraint(specifier, hashes, links)
def is_satisfied_by(self, candidate: "Candidate") -> bool:
# Reject if there are any mismatched URL constraints on this package.
if self.links and not all(_match_link(link, candidate) for link in self.links):
return False
# We can safely always allow prereleases here since PackageFinder
# already implements the prerelease logic, and would have filtered out
# prerelease candidates if the user does not expect them.
return self.specifier.contains(candidate.version, prereleases=True)
class Requirement:
@property
def project_name(self) -> NormalizedName:
"""The "project name" of a requirement.
This is different from ``name`` if this requirement contains extras,
in which case ``name`` would contain the ``[...]`` part, while this
refers to the name of the project.
"""
raise NotImplementedError("Subclass should override")
@property
def name(self) -> str:
"""The name identifying this requirement in the resolver.
This is different from ``project_name`` if this requirement contains
extras, where ``project_name`` would not contain the ``[...]`` part.
"""
raise NotImplementedError("Subclass should override")
def is_satisfied_by(self, candidate: "Candidate") -> bool:
return False
def get_candidate_lookup(self) -> CandidateLookup:
raise NotImplementedError("Subclass should override")
def format_for_error(self) -> str:
raise NotImplementedError("Subclass should override")
def _match_link(link: Link, candidate: "Candidate") -> bool:
if candidate.source_link:
return links_equivalent(link, candidate.source_link)
return False
class Candidate:
@property
def project_name(self) -> NormalizedName:
"""The "project name" of the candidate.
This is different from ``name`` if this candidate contains extras,
in which case ``name`` would contain the ``[...]`` part, while this
refers to the name of the project.
"""
raise NotImplementedError("Override in subclass")
@property
def name(self) -> str:
"""The name identifying this candidate in the resolver.
This is different from ``project_name`` if this candidate contains
extras, where ``project_name`` would not contain the ``[...]`` part.
"""
raise NotImplementedError("Override in subclass")
@property
def version(self) -> CandidateVersion:
raise NotImplementedError("Override in subclass")
@property
def is_installed(self) -> bool:
raise NotImplementedError("Override in subclass")
@property
def is_editable(self) -> bool:
raise NotImplementedError("Override in subclass")
@property
def source_link(self) -> Optional[Link]:
raise NotImplementedError("Override in subclass")
def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
raise NotImplementedError("Override in subclass")
def get_install_requirement(self) -> Optional[InstallRequirement]:
raise NotImplementedError("Override in subclass")
def format_for_error(self) -> str:
raise NotImplementedError("Subclass should override")
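
A standalone sketch of the format_name helper in this hunk, written against the unvendored packaging distribution instead of pip._vendor so it can be run directly; it shows how differently spelled extras collapse into one canonical resolver identifier:

from typing import FrozenSet

from packaging.utils import canonicalize_name


def format_name(project: str, extras: FrozenSet[str]) -> str:
    # Same rule as the hunk above: no extras means the plain project name,
    # otherwise extras are canonicalized and sorted into "name[a,b]".
    if not extras:
        return project
    canonical_extras = sorted(canonicalize_name(e) for e in extras)
    return "{}[{}]".format(project, ",".join(canonical_extras))


print(format_name("requests", frozenset()))
# requests
print(format_name("requests", frozenset({"SOCKS", "Use_Chardet_on_py3"})))
# requests[socks,use-chardet-on-py3]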


@@ -0,0 +1,555 @@
import logging
import sys
from typing import TYPE_CHECKING, Any, FrozenSet, Iterable, Optional, Tuple, Union, cast
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
from pip._vendor.packaging.version import Version
from pip._internal.exceptions import (
HashError,
InstallationSubprocessError,
MetadataInconsistent,
)
from pip._internal.metadata import BaseDistribution
from pip._internal.models.link import Link, links_equivalent
from pip._internal.models.wheel import Wheel
from pip._internal.req.constructors import (
install_req_from_editable,
install_req_from_line,
)
from pip._internal.req.req_install import InstallRequirement
from pip._internal.utils.direct_url_helpers import direct_url_from_link
from pip._internal.utils.misc import normalize_version_info
from .base import Candidate, CandidateVersion, Requirement, format_name
if TYPE_CHECKING:
from .factory import Factory
logger = logging.getLogger(__name__)
BaseCandidate = Union[
"AlreadyInstalledCandidate",
"EditableCandidate",
"LinkCandidate",
]
# Avoid conflicting with the PyPI package "Python".
REQUIRES_PYTHON_IDENTIFIER = cast(NormalizedName, "<Python from Requires-Python>")
def as_base_candidate(candidate: Candidate) -> Optional[BaseCandidate]:
"""The runtime version of BaseCandidate."""
base_candidate_classes = (
AlreadyInstalledCandidate,
EditableCandidate,
LinkCandidate,
)
if isinstance(candidate, base_candidate_classes):
return candidate
return None
def make_install_req_from_link(
link: Link, template: InstallRequirement
) -> InstallRequirement:
assert not template.editable, "template is editable"
if template.req:
line = str(template.req)
else:
line = link.url
ireq = install_req_from_line(
line,
user_supplied=template.user_supplied,
comes_from=template.comes_from,
use_pep517=template.use_pep517,
isolated=template.isolated,
constraint=template.constraint,
global_options=template.global_options,
hash_options=template.hash_options,
config_settings=template.config_settings,
)
ireq.original_link = template.original_link
ireq.link = link
ireq.extras = template.extras
return ireq
def make_install_req_from_editable(
link: Link, template: InstallRequirement
) -> InstallRequirement:
assert template.editable, "template not editable"
ireq = install_req_from_editable(
link.url,
user_supplied=template.user_supplied,
comes_from=template.comes_from,
use_pep517=template.use_pep517,
isolated=template.isolated,
constraint=template.constraint,
permit_editable_wheels=template.permit_editable_wheels,
global_options=template.global_options,
hash_options=template.hash_options,
config_settings=template.config_settings,
)
ireq.extras = template.extras
return ireq
def _make_install_req_from_dist(
dist: BaseDistribution, template: InstallRequirement
) -> InstallRequirement:
if template.req:
line = str(template.req)
elif template.link:
line = f"{dist.canonical_name} @ {template.link.url}"
else:
line = f"{dist.canonical_name}=={dist.version}"
ireq = install_req_from_line(
line,
user_supplied=template.user_supplied,
comes_from=template.comes_from,
use_pep517=template.use_pep517,
isolated=template.isolated,
constraint=template.constraint,
global_options=template.global_options,
hash_options=template.hash_options,
config_settings=template.config_settings,
)
ireq.satisfied_by = dist
return ireq
class _InstallRequirementBackedCandidate(Candidate):
"""A candidate backed by an ``InstallRequirement``.
This represents a package request whose target is not already installed
in the environment and so needs to be fetched and installed. The backing
``InstallRequirement`` is responsible for most of the leg work; this
class exposes appropriate information to the resolver.
:param link: The link passed to the ``InstallRequirement``. The backing
``InstallRequirement`` will use this link to fetch the distribution.
:param source_link: The link this candidate "originates" from. This is
different from ``link`` when the link is found in the wheel cache.
``link`` would point to the wheel cache, while this points to the
found remote link (e.g. from pypi.org).
"""
dist: BaseDistribution
is_installed = False
def __init__(
self,
link: Link,
source_link: Link,
ireq: InstallRequirement,
factory: "Factory",
name: Optional[NormalizedName] = None,
version: Optional[CandidateVersion] = None,
) -> None:
self._link = link
self._source_link = source_link
self._factory = factory
self._ireq = ireq
self._name = name
self._version = version
self.dist = self._prepare()
def __str__(self) -> str:
return f"{self.name} {self.version}"
def __repr__(self) -> str:
return "{class_name}({link!r})".format(
class_name=self.__class__.__name__,
link=str(self._link),
)
def __hash__(self) -> int:
return hash((self.__class__, self._link))
def __eq__(self, other: Any) -> bool:
if isinstance(other, self.__class__):
return links_equivalent(self._link, other._link)
return False
@property
def source_link(self) -> Optional[Link]:
return self._source_link
@property
def project_name(self) -> NormalizedName:
"""The normalised name of the project the candidate refers to"""
if self._name is None:
self._name = self.dist.canonical_name
return self._name
@property
def name(self) -> str:
return self.project_name
@property
def version(self) -> CandidateVersion:
if self._version is None:
self._version = self.dist.version
return self._version
def format_for_error(self) -> str:
return "{} {} (from {})".format(
self.name,
self.version,
self._link.file_path if self._link.is_file else self._link,
)
def _prepare_distribution(self) -> BaseDistribution:
raise NotImplementedError("Override in subclass")
def _check_metadata_consistency(self, dist: BaseDistribution) -> None:
"""Check for consistency of project name and version of dist."""
if self._name is not None and self._name != dist.canonical_name:
raise MetadataInconsistent(
self._ireq,
"name",
self._name,
dist.canonical_name,
)
if self._version is not None and self._version != dist.version:
raise MetadataInconsistent(
self._ireq,
"version",
str(self._version),
str(dist.version),
)
def _prepare(self) -> BaseDistribution:
try:
dist = self._prepare_distribution()
except HashError as e:
# Provide HashError the underlying ireq that caused it. This
# provides context for the resulting error message to show the
# offending line to the user.
e.req = self._ireq
raise
except InstallationSubprocessError as exc:
# The output has been presented already, so don't duplicate it.
exc.context = "See above for output."
raise
self._check_metadata_consistency(dist)
return dist
def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
requires = self.dist.iter_dependencies() if with_requires else ()
for r in requires:
yield self._factory.make_requirement_from_spec(str(r), self._ireq)
yield self._factory.make_requires_python_requirement(self.dist.requires_python)
def get_install_requirement(self) -> Optional[InstallRequirement]:
return self._ireq
class LinkCandidate(_InstallRequirementBackedCandidate):
is_editable = False
def __init__(
self,
link: Link,
template: InstallRequirement,
factory: "Factory",
name: Optional[NormalizedName] = None,
version: Optional[CandidateVersion] = None,
) -> None:
source_link = link
cache_entry = factory.get_wheel_cache_entry(source_link, name)
if cache_entry is not None:
logger.debug("Using cached wheel link: %s", cache_entry.link)
link = cache_entry.link
ireq = make_install_req_from_link(link, template)
assert ireq.link == link
if ireq.link.is_wheel and not ireq.link.is_file:
wheel = Wheel(ireq.link.filename)
wheel_name = canonicalize_name(wheel.name)
assert name == wheel_name, f"{name!r} != {wheel_name!r} for wheel"
# Version may not be present for PEP 508 direct URLs
if version is not None:
wheel_version = Version(wheel.version)
assert version == wheel_version, "{!r} != {!r} for wheel {}".format(
version, wheel_version, name
)
if cache_entry is not None:
assert ireq.link.is_wheel
assert ireq.link.is_file
if cache_entry.persistent and template.link is template.original_link:
ireq.cached_wheel_source_link = source_link
if cache_entry.origin is not None:
ireq.download_info = cache_entry.origin
else:
# Legacy cache entry that does not have origin.json.
# download_info may miss the archive_info.hashes field.
ireq.download_info = direct_url_from_link(
source_link, link_is_in_wheel_cache=cache_entry.persistent
)
super().__init__(
link=link,
source_link=source_link,
ireq=ireq,
factory=factory,
name=name,
version=version,
)
def _prepare_distribution(self) -> BaseDistribution:
preparer = self._factory.preparer
return preparer.prepare_linked_requirement(self._ireq, parallel_builds=True)
class EditableCandidate(_InstallRequirementBackedCandidate):
is_editable = True
def __init__(
self,
link: Link,
template: InstallRequirement,
factory: "Factory",
name: Optional[NormalizedName] = None,
version: Optional[CandidateVersion] = None,
) -> None:
super().__init__(
link=link,
source_link=link,
ireq=make_install_req_from_editable(link, template),
factory=factory,
name=name,
version=version,
)
def _prepare_distribution(self) -> BaseDistribution:
return self._factory.preparer.prepare_editable_requirement(self._ireq)
class AlreadyInstalledCandidate(Candidate):
is_installed = True
source_link = None
def __init__(
self,
dist: BaseDistribution,
template: InstallRequirement,
factory: "Factory",
) -> None:
self.dist = dist
self._ireq = _make_install_req_from_dist(dist, template)
self._factory = factory
self._version = None
# This is just logging some messages, so we can do it eagerly.
# The returned dist would be exactly the same as self.dist because we
# set satisfied_by in _make_install_req_from_dist.
# TODO: Supply reason based on force_reinstall and upgrade_strategy.
skip_reason = "already satisfied"
factory.preparer.prepare_installed_requirement(self._ireq, skip_reason)
def __str__(self) -> str:
return str(self.dist)
def __repr__(self) -> str:
return "{class_name}({distribution!r})".format(
class_name=self.__class__.__name__,
distribution=self.dist,
)
def __hash__(self) -> int:
return hash((self.__class__, self.name, self.version))
def __eq__(self, other: Any) -> bool:
if isinstance(other, self.__class__):
return self.name == other.name and self.version == other.version
return False
@property
def project_name(self) -> NormalizedName:
return self.dist.canonical_name
@property
def name(self) -> str:
return self.project_name
@property
def version(self) -> CandidateVersion:
if self._version is None:
self._version = self.dist.version
return self._version
@property
def is_editable(self) -> bool:
return self.dist.editable
def format_for_error(self) -> str:
return f"{self.name} {self.version} (Installed)"
def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
if not with_requires:
return
for r in self.dist.iter_dependencies():
yield self._factory.make_requirement_from_spec(str(r), self._ireq)
def get_install_requirement(self) -> Optional[InstallRequirement]:
return None
class ExtrasCandidate(Candidate):
"""A candidate that has 'extras', indicating additional dependencies.
Requirements can be for a project with dependencies, something like
foo[extra]. The extras don't affect the project/version being installed
directly, but indicate that we need additional dependencies. We model that
by having an artificial ExtrasCandidate that wraps the "base" candidate.
The ExtrasCandidate differs from the base in the following ways:
1. It has a unique name, of the form foo[extra]. This causes the resolver
to treat it as a separate node in the dependency graph.
2. When we're getting the candidate's dependencies,
a) We specify that we want the extra dependencies as well.
b) We add a dependency on the base candidate.
See below for why this is needed.
3. We return None for the underlying InstallRequirement, as the base
candidate will provide it, and we don't want to end up with duplicates.
The dependency on the base candidate is needed so that the resolver can't
decide that it should recommend foo[extra1] version 1.0 and foo[extra2]
version 2.0. Having those candidates depend on foo=1.0 and foo=2.0
respectively forces the resolver to recognise that this is a conflict.
"""
def __init__(
self,
base: BaseCandidate,
extras: FrozenSet[str],
) -> None:
self.base = base
self.extras = extras
def __str__(self) -> str:
name, rest = str(self.base).split(" ", 1)
return "{}[{}] {}".format(name, ",".join(self.extras), rest)
def __repr__(self) -> str:
return "{class_name}(base={base!r}, extras={extras!r})".format(
class_name=self.__class__.__name__,
base=self.base,
extras=self.extras,
)
def __hash__(self) -> int:
return hash((self.base, self.extras))
def __eq__(self, other: Any) -> bool:
if isinstance(other, self.__class__):
return self.base == other.base and self.extras == other.extras
return False
@property
def project_name(self) -> NormalizedName:
return self.base.project_name
@property
def name(self) -> str:
"""The normalised name of the project the candidate refers to"""
return format_name(self.base.project_name, self.extras)
@property
def version(self) -> CandidateVersion:
return self.base.version
def format_for_error(self) -> str:
return "{} [{}]".format(
self.base.format_for_error(), ", ".join(sorted(self.extras))
)
@property
def is_installed(self) -> bool:
return self.base.is_installed
@property
def is_editable(self) -> bool:
return self.base.is_editable
@property
def source_link(self) -> Optional[Link]:
return self.base.source_link
def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
factory = self.base._factory
# Add a dependency on the exact base
# (See note 2b in the class docstring)
yield factory.make_requirement_from_candidate(self.base)
if not with_requires:
return
# The user may have specified extras that the candidate doesn't
# support. We ignore any unsupported extras here.
valid_extras = self.extras.intersection(self.base.dist.iter_provided_extras())
invalid_extras = self.extras.difference(self.base.dist.iter_provided_extras())
for extra in sorted(invalid_extras):
logger.warning(
"%s %s does not provide the extra '%s'",
self.base.name,
self.version,
extra,
)
for r in self.base.dist.iter_dependencies(valid_extras):
requirement = factory.make_requirement_from_spec(
str(r), self.base._ireq, valid_extras
)
if requirement:
yield requirement
def get_install_requirement(self) -> Optional[InstallRequirement]:
# We don't return anything here, because we always
# depend on the base candidate, and we'll get the
# install requirement from that.
return None
class RequiresPythonCandidate(Candidate):
is_installed = False
source_link = None
def __init__(self, py_version_info: Optional[Tuple[int, ...]]) -> None:
if py_version_info is not None:
version_info = normalize_version_info(py_version_info)
else:
version_info = sys.version_info[:3]
self._version = Version(".".join(str(c) for c in version_info))
# We don't need to implement __eq__() and __ne__() since there is always
# only one RequiresPythonCandidate in a resolution, i.e. the host Python.
# The built-in object.__eq__() and object.__ne__() do exactly what we want.
def __str__(self) -> str:
return f"Python {self._version}"
@property
def project_name(self) -> NormalizedName:
return REQUIRES_PYTHON_IDENTIFIER
@property
def name(self) -> str:
return REQUIRES_PYTHON_IDENTIFIER
@property
def version(self) -> CandidateVersion:
return self._version
def format_for_error(self) -> str:
return f"Python {self.version}"
def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
return ()
def get_install_requirement(self) -> Optional[InstallRequirement]:
return None
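
A toy model of the ExtrasCandidate docstring above; the ToyBase/ToyExtras classes are invented for illustration and are not pip's API. They only mimic note 1 (the distinct foo[extra] name) and note 2b (the hard dependency on the exact base) to show why foo[a] and foo[b] cannot resolve to different versions of foo:

from dataclasses import dataclass
from typing import FrozenSet, Iterator


@dataclass(frozen=True)
class ToyBase:
    project: str
    version: str


@dataclass(frozen=True)
class ToyExtras:
    base: ToyBase
    extras: FrozenSet[str]

    @property
    def name(self) -> str:
        # Note 1: a distinct node name of the form "foo[a,b]".
        return "{}[{}]".format(self.base.project, ",".join(sorted(self.extras)))

    def iter_dependencies(self) -> Iterator[str]:
        # Note 2b: always depend on the exact base, pinning both
        # foo[a] and foo[b] to the same underlying foo version.
        yield f"{self.base.project}=={self.base.version}"


foo = ToyBase("foo", "1.0")
print(ToyExtras(foo, frozenset({"a"})).name)                       # foo[a]
print(list(ToyExtras(foo, frozenset({"b"})).iter_dependencies()))  # ['foo==1.0']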


@@ -0,0 +1,730 @@
import contextlib
import functools
import logging
from typing import (
TYPE_CHECKING,
Dict,
FrozenSet,
Iterable,
Iterator,
List,
Mapping,
NamedTuple,
Optional,
Sequence,
Set,
Tuple,
TypeVar,
cast,
)
from pip._vendor.packaging.requirements import InvalidRequirement
from pip._vendor.packaging.specifiers import SpecifierSet
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
from pip._vendor.resolvelib import ResolutionImpossible
from pip._internal.cache import CacheEntry, WheelCache
from pip._internal.exceptions import (
DistributionNotFound,
InstallationError,
MetadataInconsistent,
UnsupportedPythonVersion,
UnsupportedWheel,
)
from pip._internal.index.package_finder import PackageFinder
from pip._internal.metadata import BaseDistribution, get_default_environment
from pip._internal.models.link import Link
from pip._internal.models.wheel import Wheel
from pip._internal.operations.prepare import RequirementPreparer
from pip._internal.req.constructors import install_req_from_link_and_ireq
from pip._internal.req.req_install import (
InstallRequirement,
check_invalid_constraint_type,
)
from pip._internal.resolution.base import InstallRequirementProvider
from pip._internal.utils.compatibility_tags import get_supported
from pip._internal.utils.hashes import Hashes
from pip._internal.utils.packaging import get_requirement
from pip._internal.utils.virtualenv import running_under_virtualenv
from .base import Candidate, CandidateVersion, Constraint, Requirement
from .candidates import (
AlreadyInstalledCandidate,
BaseCandidate,
EditableCandidate,
ExtrasCandidate,
LinkCandidate,
RequiresPythonCandidate,
as_base_candidate,
)
from .found_candidates import FoundCandidates, IndexCandidateInfo
from .requirements import (
ExplicitRequirement,
RequiresPythonRequirement,
SpecifierRequirement,
UnsatisfiableRequirement,
)
if TYPE_CHECKING:
from typing import Protocol
class ConflictCause(Protocol):
requirement: RequiresPythonRequirement
parent: Candidate
logger = logging.getLogger(__name__)
C = TypeVar("C")
Cache = Dict[Link, C]
class CollectedRootRequirements(NamedTuple):
requirements: List[Requirement]
constraints: Dict[str, Constraint]
user_requested: Dict[str, int]
class Factory:
def __init__(
self,
finder: PackageFinder,
preparer: RequirementPreparer,
make_install_req: InstallRequirementProvider,
wheel_cache: Optional[WheelCache],
use_user_site: bool,
force_reinstall: bool,
ignore_installed: bool,
ignore_requires_python: bool,
py_version_info: Optional[Tuple[int, ...]] = None,
) -> None:
self._finder = finder
self.preparer = preparer
self._wheel_cache = wheel_cache
self._python_candidate = RequiresPythonCandidate(py_version_info)
self._make_install_req_from_spec = make_install_req
self._use_user_site = use_user_site
self._force_reinstall = force_reinstall
self._ignore_requires_python = ignore_requires_python
self._build_failures: Cache[InstallationError] = {}
self._link_candidate_cache: Cache[LinkCandidate] = {}
self._editable_candidate_cache: Cache[EditableCandidate] = {}
self._installed_candidate_cache: Dict[str, AlreadyInstalledCandidate] = {}
self._extras_candidate_cache: Dict[
Tuple[int, FrozenSet[str]], ExtrasCandidate
] = {}
if not ignore_installed:
env = get_default_environment()
self._installed_dists = {
dist.canonical_name: dist
for dist in env.iter_installed_distributions(local_only=False)
}
else:
self._installed_dists = {}
@property
def force_reinstall(self) -> bool:
return self._force_reinstall
def _fail_if_link_is_unsupported_wheel(self, link: Link) -> None:
if not link.is_wheel:
return
wheel = Wheel(link.filename)
if wheel.supported(self._finder.target_python.get_tags()):
return
msg = f"{link.filename} is not a supported wheel on this platform."
raise UnsupportedWheel(msg)
def _make_extras_candidate(
self, base: BaseCandidate, extras: FrozenSet[str]
) -> ExtrasCandidate:
cache_key = (id(base), extras)
try:
candidate = self._extras_candidate_cache[cache_key]
except KeyError:
candidate = ExtrasCandidate(base, extras)
self._extras_candidate_cache[cache_key] = candidate
return candidate
def _make_candidate_from_dist(
self,
dist: BaseDistribution,
extras: FrozenSet[str],
template: InstallRequirement,
) -> Candidate:
try:
base = self._installed_candidate_cache[dist.canonical_name]
except KeyError:
base = AlreadyInstalledCandidate(dist, template, factory=self)
self._installed_candidate_cache[dist.canonical_name] = base
if not extras:
return base
return self._make_extras_candidate(base, extras)
def _make_candidate_from_link(
self,
link: Link,
extras: FrozenSet[str],
template: InstallRequirement,
name: Optional[NormalizedName],
version: Optional[CandidateVersion],
) -> Optional[Candidate]:
# TODO: Check already installed candidate, and use it if the link and
# editable flag match.
if link in self._build_failures:
# We already tried this candidate before, and it does not build.
# Don't bother trying again.
return None
if template.editable:
if link not in self._editable_candidate_cache:
try:
self._editable_candidate_cache[link] = EditableCandidate(
link,
template,
factory=self,
name=name,
version=version,
)
except MetadataInconsistent as e:
logger.info(
"Discarding [blue underline]%s[/]: [yellow]%s[reset]",
link,
e,
extra={"markup": True},
)
self._build_failures[link] = e
return None
base: BaseCandidate = self._editable_candidate_cache[link]
else:
if link not in self._link_candidate_cache:
try:
self._link_candidate_cache[link] = LinkCandidate(
link,
template,
factory=self,
name=name,
version=version,
)
except MetadataInconsistent as e:
logger.info(
"Discarding [blue underline]%s[/]: [yellow]%s[reset]",
link,
e,
extra={"markup": True},
)
self._build_failures[link] = e
return None
base = self._link_candidate_cache[link]
if not extras:
return base
return self._make_extras_candidate(base, extras)
def _iter_found_candidates(
self,
ireqs: Sequence[InstallRequirement],
specifier: SpecifierSet,
hashes: Hashes,
prefers_installed: bool,
incompatible_ids: Set[int],
) -> Iterable[Candidate]:
if not ireqs:
return ()
# The InstallRequirement implementation requires us to give it a
# "template". Here we just choose the first requirement to represent
# all of them.
# Hopefully the Project model can correct this mismatch in the future.
template = ireqs[0]
assert template.req, "Candidates found on index must be PEP 508"
name = canonicalize_name(template.req.name)
extras: FrozenSet[str] = frozenset()
for ireq in ireqs:
assert ireq.req, "Candidates found on index must be PEP 508"
specifier &= ireq.req.specifier
hashes &= ireq.hashes(trust_internet=False)
extras |= frozenset(ireq.extras)
def _get_installed_candidate() -> Optional[Candidate]:
"""Get the candidate for the currently-installed version."""
# If --force-reinstall is set, we want the version from the index
# instead, so we "pretend" there is nothing installed.
if self._force_reinstall:
return None
try:
installed_dist = self._installed_dists[name]
except KeyError:
return None
# Don't use the installed distribution if its version does not fit
# the current dependency graph.
if not specifier.contains(installed_dist.version, prereleases=True):
return None
candidate = self._make_candidate_from_dist(
dist=installed_dist,
extras=extras,
template=template,
)
# The candidate is a known incompatibility. Don't use it.
if id(candidate) in incompatible_ids:
return None
return candidate
def iter_index_candidate_infos() -> Iterator[IndexCandidateInfo]:
result = self._finder.find_best_candidate(
project_name=name,
specifier=specifier,
hashes=hashes,
)
icans = list(result.iter_applicable())
# PEP 592: Yanked releases are ignored unless the specifier
# explicitly pins a version (via '==' or '===') that can be
# solely satisfied by a yanked release.
all_yanked = all(ican.link.is_yanked for ican in icans)
def is_pinned(specifier: SpecifierSet) -> bool:
for sp in specifier:
if sp.operator == "===":
return True
if sp.operator != "==":
continue
if sp.version.endswith(".*"):
continue
return True
return False
pinned = is_pinned(specifier)
# PackageFinder returns earlier versions first, so we reverse.
for ican in reversed(icans):
if not (all_yanked and pinned) and ican.link.is_yanked:
continue
func = functools.partial(
self._make_candidate_from_link,
link=ican.link,
extras=extras,
template=template,
name=name,
version=ican.version,
)
yield ican.version, func
return FoundCandidates(
iter_index_candidate_infos,
_get_installed_candidate(),
prefers_installed,
incompatible_ids,
)
def _iter_explicit_candidates_from_base(
self,
base_requirements: Iterable[Requirement],
extras: FrozenSet[str],
) -> Iterator[Candidate]:
"""Produce explicit candidates from the base given an extra-ed package.
:param base_requirements: Requirements known to the resolver. The
requirements are guaranteed to not have extras.
:param extras: The extras to inject into the explicit requirements'
candidates.
"""
for req in base_requirements:
lookup_cand, _ = req.get_candidate_lookup()
if lookup_cand is None: # Not explicit.
continue
# We've stripped extras from the identifier, and should always
# get a BaseCandidate here, unless there's a bug elsewhere.
base_cand = as_base_candidate(lookup_cand)
assert base_cand is not None, "no extras here"
yield self._make_extras_candidate(base_cand, extras)
def _iter_candidates_from_constraints(
self,
identifier: str,
constraint: Constraint,
template: InstallRequirement,
) -> Iterator[Candidate]:
"""Produce explicit candidates from constraints.
This creates "fake" InstallRequirement objects that are basically clones
of what "should" be the template, but with original_link set to link.
"""
for link in constraint.links:
self._fail_if_link_is_unsupported_wheel(link)
candidate = self._make_candidate_from_link(
link,
extras=frozenset(),
template=install_req_from_link_and_ireq(link, template),
name=canonicalize_name(identifier),
version=None,
)
if candidate:
yield candidate
def find_candidates(
self,
identifier: str,
requirements: Mapping[str, Iterable[Requirement]],
incompatibilities: Mapping[str, Iterator[Candidate]],
constraint: Constraint,
prefers_installed: bool,
) -> Iterable[Candidate]:
# Collect basic lookup information from the requirements.
explicit_candidates: Set[Candidate] = set()
ireqs: List[InstallRequirement] = []
for req in requirements[identifier]:
cand, ireq = req.get_candidate_lookup()
if cand is not None:
explicit_candidates.add(cand)
if ireq is not None:
ireqs.append(ireq)
# If the current identifier contains extras, add explicit candidates
# from entries under the extra-less identifier.
with contextlib.suppress(InvalidRequirement):
parsed_requirement = get_requirement(identifier)
explicit_candidates.update(
self._iter_explicit_candidates_from_base(
requirements.get(parsed_requirement.name, ()),
frozenset(parsed_requirement.extras),
),
)
# Add explicit candidates from constraints. We only do this if there are
# known ireqs, which represent requirements not already explicit. If
# there are no ireqs, we're constraining already-explicit requirements,
# which is handled later when we return the explicit candidates.
if ireqs:
try:
explicit_candidates.update(
self._iter_candidates_from_constraints(
identifier,
constraint,
template=ireqs[0],
),
)
except UnsupportedWheel:
# If we're constrained to install a wheel incompatible with the
# target architecture, no candidates will ever be valid.
return ()
# Since we cache all the candidates, incompatibility identification
# can be made quicker by comparing only the id() values.
incompat_ids = {id(c) for c in incompatibilities.get(identifier, ())}
# If none of the requirements want an explicit candidate, we can ask
# the finder for candidates.
if not explicit_candidates:
return self._iter_found_candidates(
ireqs,
constraint.specifier,
constraint.hashes,
prefers_installed,
incompat_ids,
)
return (
c
for c in explicit_candidates
if id(c) not in incompat_ids
and constraint.is_satisfied_by(c)
and all(req.is_satisfied_by(c) for req in requirements[identifier])
)
def _make_requirement_from_install_req(
self, ireq: InstallRequirement, requested_extras: Iterable[str]
) -> Optional[Requirement]:
if not ireq.match_markers(requested_extras):
logger.info(
"Ignoring %s: markers '%s' don't match your environment",
ireq.name,
ireq.markers,
)
return None
if not ireq.link:
return SpecifierRequirement(ireq)
self._fail_if_link_is_unsupported_wheel(ireq.link)
cand = self._make_candidate_from_link(
ireq.link,
extras=frozenset(ireq.extras),
template=ireq,
name=canonicalize_name(ireq.name) if ireq.name else None,
version=None,
)
if cand is None:
# There's no way we can satisfy a URL requirement if the underlying
# candidate fails to build. An unnamed URL must be user-supplied, so
# we fail eagerly. If the URL is named, an unsatisfiable requirement
# can make the resolver do the right thing, either backtrack (and
# maybe find some other requirement that's buildable) or raise a
# ResolutionImpossible eventually.
if not ireq.name:
raise self._build_failures[ireq.link]
return UnsatisfiableRequirement(canonicalize_name(ireq.name))
return self.make_requirement_from_candidate(cand)
def collect_root_requirements(
self, root_ireqs: List[InstallRequirement]
) -> CollectedRootRequirements:
collected = CollectedRootRequirements([], {}, {})
for i, ireq in enumerate(root_ireqs):
if ireq.constraint:
# Ensure we only accept valid constraints
problem = check_invalid_constraint_type(ireq)
if problem:
raise InstallationError(problem)
if not ireq.match_markers():
continue
assert ireq.name, "Constraint must be named"
name = canonicalize_name(ireq.name)
if name in collected.constraints:
collected.constraints[name] &= ireq
else:
collected.constraints[name] = Constraint.from_ireq(ireq)
else:
req = self._make_requirement_from_install_req(
ireq,
requested_extras=(),
)
if req is None:
continue
if ireq.user_supplied and req.name not in collected.user_requested:
collected.user_requested[req.name] = i
collected.requirements.append(req)
return collected
def make_requirement_from_candidate(
self, candidate: Candidate
) -> ExplicitRequirement:
return ExplicitRequirement(candidate)
def make_requirement_from_spec(
self,
specifier: str,
comes_from: Optional[InstallRequirement],
requested_extras: Iterable[str] = (),
) -> Optional[Requirement]:
ireq = self._make_install_req_from_spec(specifier, comes_from)
return self._make_requirement_from_install_req(ireq, requested_extras)
def make_requires_python_requirement(
self,
specifier: SpecifierSet,
) -> Optional[Requirement]:
if self._ignore_requires_python:
return None
# Don't bother creating a dependency for an empty Requires-Python.
if not str(specifier):
return None
return RequiresPythonRequirement(specifier, self._python_candidate)
def get_wheel_cache_entry(
self, link: Link, name: Optional[str]
) -> Optional[CacheEntry]:
"""Look up the link in the wheel cache.
If ``preparer.require_hashes`` is True, don't use the wheel cache,
because cached wheels, always built locally, have different hashes
than the files downloaded from the index server and thus throw false
hash mismatches. Furthermore, cached wheels at present have
nondeterministic contents due to file modification times.
"""
if self._wheel_cache is None:
return None
return self._wheel_cache.get_cache_entry(
link=link,
package_name=name,
supported_tags=get_supported(),
)
def get_dist_to_uninstall(self, candidate: Candidate) -> Optional[BaseDistribution]:
# TODO: Are there more cases this needs to return True? Editable?
dist = self._installed_dists.get(candidate.project_name)
if dist is None: # Not installed, no uninstallation required.
return None
# We're installing into global site. The current installation must
# be uninstalled, no matter whether it's in the global or user site, because
# user site installation has precedence over global.
if not self._use_user_site:
return dist
# We're installing into user site. Remove the user site installation.
if dist.in_usersite:
return dist
# We're installing into user site, but the installed incompatible
# package is in global site. We can't uninstall that, and would instead let
# the new user installation "shadow" it. But shadowing won't work
# in virtual environments, so we error out.
if running_under_virtualenv() and dist.in_site_packages:
message = (
f"Will not install to the user site because it will lack "
f"sys.path precedence to {dist.raw_name} in {dist.location}"
)
raise InstallationError(message)
return None
def _report_requires_python_error(
self, causes: Sequence["ConflictCause"]
) -> UnsupportedPythonVersion:
assert causes, "Requires-Python error reported with no cause"
version = self._python_candidate.version
if len(causes) == 1:
specifier = str(causes[0].requirement.specifier)
message = (
f"Package {causes[0].parent.name!r} requires a different "
f"Python: {version} not in {specifier!r}"
)
return UnsupportedPythonVersion(message)
message = f"Packages require a different Python. {version} not in:"
for cause in causes:
package = cause.parent.format_for_error()
specifier = str(cause.requirement.specifier)
message += f"\n{specifier!r} (required by {package})"
return UnsupportedPythonVersion(message)
def _report_single_requirement_conflict(
self, req: Requirement, parent: Optional[Candidate]
) -> DistributionNotFound:
if parent is None:
req_disp = str(req)
else:
req_disp = f"{req} (from {parent.name})"
cands = self._finder.find_all_candidates(req.project_name)
skipped_by_requires_python = self._finder.requires_python_skipped_reasons()
versions = [str(v) for v in sorted({c.version for c in cands})]
if skipped_by_requires_python:
logger.critical(
"Ignored the following versions that require a different python "
"version: %s",
"; ".join(skipped_by_requires_python) or "none",
)
logger.critical(
"Could not find a version that satisfies the requirement %s "
"(from versions: %s)",
req_disp,
", ".join(versions) or "none",
)
if str(req) == "requirements.txt":
logger.info(
"HINT: You are attempting to install a package literally "
'named "requirements.txt" (which cannot exist). Consider '
"using the '-r' flag to install the packages listed in "
"requirements.txt"
)
return DistributionNotFound(f"No matching distribution found for {req}")
def get_installation_error(
self,
e: "ResolutionImpossible[Requirement, Candidate]",
constraints: Dict[str, Constraint],
) -> InstallationError:
assert e.causes, "Installation error reported with no cause"
# If one of the things we can't solve is "we need Python X.Y",
# that is what we report.
requires_python_causes = [
cause
for cause in e.causes
if isinstance(cause.requirement, RequiresPythonRequirement)
and not cause.requirement.is_satisfied_by(self._python_candidate)
]
if requires_python_causes:
# The comprehension above makes sure all Requirement instances are
# RequiresPythonRequirement, so let's cast for convenience.
return self._report_requires_python_error(
cast("Sequence[ConflictCause]", requires_python_causes),
)
# Otherwise, we have a set of causes which can't all be satisfied
# at once.
# The simplest case is when we have *one* cause that can't be
# satisfied. We just report that case.
if len(e.causes) == 1:
req, parent = e.causes[0]
if req.name not in constraints:
return self._report_single_requirement_conflict(req, parent)
# OK, we now have a list of requirements that can't all be
# satisfied at once.
# A couple of formatting helpers
def text_join(parts: List[str]) -> str:
if len(parts) == 1:
return parts[0]
return ", ".join(parts[:-1]) + " and " + parts[-1]
def describe_trigger(parent: Candidate) -> str:
ireq = parent.get_install_requirement()
if not ireq or not ireq.comes_from:
return f"{parent.name}=={parent.version}"
if isinstance(ireq.comes_from, InstallRequirement):
return str(ireq.comes_from.name)
return str(ireq.comes_from)
triggers = set()
for req, parent in e.causes:
if parent is None:
# This is a root requirement, so we can report it directly
trigger = req.format_for_error()
else:
trigger = describe_trigger(parent)
triggers.add(trigger)
if triggers:
info = text_join(sorted(triggers))
else:
info = "the requested packages"
msg = (
"Cannot install {} because these package versions "
"have conflicting dependencies.".format(info)
)
logger.critical(msg)
msg = "\nThe conflict is caused by:"
relevant_constraints = set()
for req, parent in e.causes:
if req.name in constraints:
relevant_constraints.add(req.name)
msg = msg + "\n "
if parent:
msg = msg + f"{parent.name} {parent.version} depends on "
else:
msg = msg + "The user requested "
msg = msg + req.format_for_error()
for key in relevant_constraints:
spec = constraints[key].specifier
msg += f"\n The user requested (constraint) {key}{spec}"
msg = (
msg
+ "\n\n"
+ "To fix this you could try to:\n"
+ "1. loosen the range of package versions you've specified\n"
+ "2. remove package versions to allow pip attempt to solve "
+ "the dependency conflict\n"
)
logger.info(msg)
return DistributionNotFound(
"ResolutionImpossible: for help visit "
"https://pip.pypa.io/en/latest/topics/dependency-resolution/"
"#dealing-with-dependency-conflicts"
)
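
The PEP 592 handling in _iter_found_candidates above turns on the small is_pinned check. A standalone copy of that check against the unvendored packaging distribution (whose Specifier API matches the vendored copy used in the hunk):

from packaging.specifiers import SpecifierSet


def is_pinned(specifier: SpecifierSet) -> bool:
    # Same rule as the nested helper above: "===" always pins, and "=="
    # pins unless it is a prefix match such as "==1.0.*".
    for sp in specifier:
        if sp.operator == "===":
            return True
        if sp.operator != "==":
            continue
        if sp.version.endswith(".*"):
            continue
        return True
    return False


print(is_pinned(SpecifierSet("==1.2.3")))  # True  -> a solely-yanked release may be used
print(is_pinned(SpecifierSet("==1.2.*")))  # False -> yanked candidates are skipped
print(is_pinned(SpecifierSet(">=1.2")))    # False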


@@ -0,0 +1,155 @@
"""Utilities to lazily create and visit candidates found.
Creating and visiting a candidate is a *very* costly operation. It involves
fetching, extracting, potentially building modules from source, and verifying
distribution metadata. It is therefore crucial for performance to keep
everything here lazy all the way down, so we only touch candidates that we
absolutely need, and not "download the world" when we only need one version of
something.
"""
import functools
from collections.abc import Sequence
from typing import TYPE_CHECKING, Any, Callable, Iterator, Optional, Set, Tuple
from pip._vendor.packaging.version import _BaseVersion
from .base import Candidate
IndexCandidateInfo = Tuple[_BaseVersion, Callable[[], Optional[Candidate]]]
if TYPE_CHECKING:
SequenceCandidate = Sequence[Candidate]
else:
# For compatibility: Python before 3.9 does not support using [] on the
# Sequence class.
#
# >>> from collections.abc import Sequence
# >>> Sequence[str]
# Traceback (most recent call last):
# File "<stdin>", line 1, in <module>
# TypeError: 'ABCMeta' object is not subscriptable
#
# TODO: Remove this block after dropping Python 3.8 support.
SequenceCandidate = Sequence
def _iter_built(infos: Iterator[IndexCandidateInfo]) -> Iterator[Candidate]:
"""Iterator for ``FoundCandidates``.
This iterator is used when the package is not already installed. Candidates
from the index are yielded in their normal ordering.
"""
versions_found: Set[_BaseVersion] = set()
for version, func in infos:
if version in versions_found:
continue
candidate = func()
if candidate is None:
continue
yield candidate
versions_found.add(version)
def _iter_built_with_prepended(
installed: Candidate, infos: Iterator[IndexCandidateInfo]
) -> Iterator[Candidate]:
"""Iterator for ``FoundCandidates``.
This iterator is used when the resolver prefers the already-installed
candidate and NOT to upgrade. The installed candidate is therefore
always yielded first, and candidates from the index come later in their
normal ordering, except that the already-installed version is skipped.
"""
yield installed
versions_found: Set[_BaseVersion] = {installed.version}
for version, func in infos:
if version in versions_found:
continue
candidate = func()
if candidate is None:
continue
yield candidate
versions_found.add(version)
def _iter_built_with_inserted(
installed: Candidate, infos: Iterator[IndexCandidateInfo]
) -> Iterator[Candidate]:
"""Iterator for ``FoundCandidates``.
This iterator is used when the resolver prefers to upgrade an
already-installed package. Candidates from the index are returned in their
normal ordering, except that the installed candidate replaces its matching version.
The implementation iterates through and yields other candidates, inserting
the installed candidate exactly once before we start yielding older or
equivalent candidates, or after all other candidates if they are all newer.
"""
versions_found: Set[_BaseVersion] = set()
for version, func in infos:
if version in versions_found:
continue
# If the installed candidate is better, yield it first.
if installed.version >= version:
yield installed
versions_found.add(installed.version)
candidate = func()
if candidate is None:
continue
yield candidate
versions_found.add(version)
# If the installed candidate is older than all other candidates.
if installed.version not in versions_found:
yield installed
class FoundCandidates(SequenceCandidate):
"""A lazy sequence to provide candidates to the resolver.
The intended usage is to return this from `find_matches()` so the resolver
can iterate through the sequence multiple times, but only access the index
page when remote packages are actually needed. This improves performance
when suitable candidates are already installed on disk.
"""
def __init__(
self,
get_infos: Callable[[], Iterator[IndexCandidateInfo]],
installed: Optional[Candidate],
prefers_installed: bool,
incompatible_ids: Set[int],
):
self._get_infos = get_infos
self._installed = installed
self._prefers_installed = prefers_installed
self._incompatible_ids = incompatible_ids
def __getitem__(self, index: Any) -> Any:
# Implemented to satisfy the ABC check. This is not needed by the
# resolver, and should not be used by the provider either (for
# performance reasons).
raise NotImplementedError("don't do this")
def __iter__(self) -> Iterator[Candidate]:
infos = self._get_infos()
if not self._installed:
iterator = _iter_built(infos)
elif self._prefers_installed:
iterator = _iter_built_with_prepended(self._installed, infos)
else:
iterator = _iter_built_with_inserted(self._installed, infos)
return (c for c in iterator if id(c) not in self._incompatible_ids)
def __len__(self) -> int:
# Implemented to satisfy the ABC check. This is not needed by the
# resolver, and should not be used by the provider either (for
# performance reasons).
raise NotImplementedError("don't do this")
@functools.lru_cache(maxsize=1)
def __bool__(self) -> bool:
if self._prefers_installed and self._installed:
return True
return any(self)
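
The three _iter_built* generators above differ only in where the installed candidate is slotted in. A simplified sketch with bare packaging versions (the real generators work on lazy (version, builder) pairs and skip candidates that fail to build):

from typing import Iterator, List

from packaging.version import Version

installed = Version("1.1")
index_versions = [Version(v) for v in ("2.0", "1.1", "1.0")]  # newest first


def with_prepended(installed: Version, versions: List[Version]) -> Iterator[Version]:
    # prefers_installed: the installed version always comes first.
    yield installed
    for v in versions:
        if v != installed:
            yield v


def with_inserted(installed: Version, versions: List[Version]) -> Iterator[Version]:
    # upgrade allowed: normal (newest-first) order, with the installed
    # version inserted exactly once at its place in that order.
    emitted = False
    for v in versions:
        if not emitted and installed >= v:
            yield installed
            emitted = True
        if v != installed:
            yield v
    if not emitted:
        yield installed


print([str(v) for v in with_prepended(installed, index_versions)])  # ['1.1', '2.0', '1.0']
print([str(v) for v in with_inserted(installed, index_versions)])   # ['2.0', '1.1', '1.0']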


@@ -0,0 +1,255 @@
import collections
import math
from typing import (
TYPE_CHECKING,
Dict,
Iterable,
Iterator,
Mapping,
Sequence,
TypeVar,
Union,
)
from pip._vendor.resolvelib.providers import AbstractProvider
from .base import Candidate, Constraint, Requirement
from .candidates import REQUIRES_PYTHON_IDENTIFIER
from .factory import Factory
if TYPE_CHECKING:
from pip._vendor.resolvelib.providers import Preference
from pip._vendor.resolvelib.resolvers import RequirementInformation
PreferenceInformation = RequirementInformation[Requirement, Candidate]
_ProviderBase = AbstractProvider[Requirement, Candidate, str]
else:
_ProviderBase = AbstractProvider
# Notes on the relationship between the provider, the factory, and the
# candidate and requirement classes.
#
# The provider is a direct implementation of the resolvelib class. Its role
# is to deliver the API that resolvelib expects.
#
# Rather than work with completely abstract "requirement" and "candidate"
# concepts as resolvelib does, pip has concrete classes implementing these two
# ideas. The API of Requirement and Candidate objects are defined in the base
# classes, but essentially map fairly directly to the equivalent provider
# methods. In particular, `find_matches` and `is_satisfied_by` are
# requirement methods, and `get_dependencies` is a candidate method.
#
# The factory is the interface to pip's internal mechanisms. It is stateless,
# and is created by the resolver and held as a property of the provider. It is
# responsible for creating Requirement and Candidate objects, and provides
# services to those objects (access to pip's finder and preparer).
D = TypeVar("D")
V = TypeVar("V")
def _get_with_identifier(
mapping: Mapping[str, V],
identifier: str,
default: D,
) -> Union[D, V]:
"""Get item from a package name lookup mapping with a resolver identifier.
This extra logic is needed when the target mapping is keyed by package
name, which cannot be directly looked up with an identifier (which may
contain requested extras). Additional logic is added to also look up a value
by "cleaning up" the extras from the identifier.
"""
if identifier in mapping:
return mapping[identifier]
# HACK: Theoretically we should check whether this identifier is a valid
# "NAME[EXTRAS]" format, and parse out the name part with packaging or
# some regular expression. But since pip's resolver only spits out three
# kinds of identifiers: normalized PEP 503 names, normalized names plus
# extras, and Requires-Python, we can cheat a bit here.
name, open_bracket, _ = identifier.partition("[")
if open_bracket and name in mapping:
return mapping[name]
return default
class PipProvider(_ProviderBase):
"""Pip's provider implementation for resolvelib.
:param constraints: A mapping of constraints specified by the user. Keys
are canonicalized project names.
:param ignore_dependencies: Whether the user specified ``--no-deps``.
:param upgrade_strategy: The user-specified upgrade strategy.
:param user_requested: A mapping of canonicalized package names that the
user supplied for pip to install/upgrade, to the order in which they were provided.
"""
def __init__(
self,
factory: Factory,
constraints: Dict[str, Constraint],
ignore_dependencies: bool,
upgrade_strategy: str,
user_requested: Dict[str, int],
) -> None:
self._factory = factory
self._constraints = constraints
self._ignore_dependencies = ignore_dependencies
self._upgrade_strategy = upgrade_strategy
self._user_requested = user_requested
self._known_depths: Dict[str, float] = collections.defaultdict(lambda: math.inf)
def identify(self, requirement_or_candidate: Union[Requirement, Candidate]) -> str:
return requirement_or_candidate.name
def get_preference(
self,
identifier: str,
resolutions: Mapping[str, Candidate],
candidates: Mapping[str, Iterator[Candidate]],
information: Mapping[str, Iterable["PreferenceInformation"]],
backtrack_causes: Sequence["PreferenceInformation"],
) -> "Preference":
"""Produce a sort key for given requirement based on preference.
The lower the return value is, the more preferred this group of
arguments is.
Currently pip considers the following in order:
* Prefer if any of the known requirements is "direct", e.g. points to an
explicit URL.
* If equal, prefer if any requirement is "pinned", i.e. contains
operator ``===`` or ``==``.
* If equal, calculate an approximate "depth" and resolve requirements
closer to the user-specified requirements first. If the depth cannot
be determined (e.g. due to no matching parents), it is considered
infinite.
* Order user-specified requirements by the order they are specified.
* If equal, prefer "non-free" requirements, i.e. those containing at least one
operator, such as ``>=`` or ``<``.
* If equal, order alphabetically for consistency (helps debuggability).
"""
try:
next(iter(information[identifier]))
except StopIteration:
# There is no information for this identifier, so there are no known
# candidates.
has_information = False
else:
has_information = True
if has_information:
lookups = (r.get_candidate_lookup() for r, _ in information[identifier])
candidate, ireqs = zip(*lookups)
else:
candidate, ireqs = None, ()
operators = [
specifier.operator
for specifier_set in (ireq.specifier for ireq in ireqs if ireq)
for specifier in specifier_set
]
direct = candidate is not None
pinned = any(op[:2] == "==" for op in operators)
unfree = bool(operators)
try:
requested_order: Union[int, float] = self._user_requested[identifier]
except KeyError:
requested_order = math.inf
if has_information:
parent_depths = (
self._known_depths[parent.name] if parent is not None else 0.0
for _, parent in information[identifier]
)
inferred_depth = min(d for d in parent_depths) + 1.0
else:
inferred_depth = math.inf
else:
inferred_depth = 1.0
self._known_depths[identifier] = inferred_depth
requested_order = self._user_requested.get(identifier, math.inf)
# Requires-Python has only one candidate and the check is basically
# free, so we always do it first to avoid needless work if it fails.
requires_python = identifier == REQUIRES_PYTHON_IDENTIFIER
# Prefer the causes of backtracking on the assumption that the problem
# resolving the dependency tree is related to the failures that caused
# the backtracking
backtrack_cause = self.is_backtrack_cause(identifier, backtrack_causes)
return (
not requires_python,
not direct,
not pinned,
not backtrack_cause,
inferred_depth,
requested_order,
not unfree,
identifier,
)
def find_matches(
self,
identifier: str,
requirements: Mapping[str, Iterator[Requirement]],
incompatibilities: Mapping[str, Iterator[Candidate]],
) -> Iterable[Candidate]:
def _eligible_for_upgrade(identifier: str) -> bool:
"""Are upgrades allowed for this project?
This checks the upgrade strategy, and whether the project was one
that the user specified in the command line, in order to decide
whether we should upgrade if there's a newer version available.
(Note that we don't need access to the `--upgrade` flag, because
an upgrade strategy of "to-satisfy-only" means that `--upgrade`
was not specified).
"""
if self._upgrade_strategy == "eager":
return True
elif self._upgrade_strategy == "only-if-needed":
user_order = _get_with_identifier(
self._user_requested,
identifier,
default=None,
)
return user_order is not None
return False
constraint = _get_with_identifier(
self._constraints,
identifier,
default=Constraint.empty(),
)
return self._factory.find_candidates(
identifier=identifier,
requirements=requirements,
constraint=constraint,
prefers_installed=(not _eligible_for_upgrade(identifier)),
incompatibilities=incompatibilities,
)
def is_satisfied_by(self, requirement: Requirement, candidate: Candidate) -> bool:
return requirement.is_satisfied_by(candidate)
def get_dependencies(self, candidate: Candidate) -> Sequence[Requirement]:
with_requires = not self._ignore_dependencies
return [r for r in candidate.iter_dependencies(with_requires) if r is not None]
@staticmethod
def is_backtrack_cause(
identifier: str, backtrack_causes: Sequence["PreferenceInformation"]
) -> bool:
for backtrack_cause in backtrack_causes:
if identifier == backtrack_cause.requirement.name:
return True
if backtrack_cause.parent and identifier == backtrack_cause.parent.name:
return True
return False
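
get_preference returns a plain tuple, so resolvelib simply sorts identifiers by it; False sorts before True, which is why every boolean criterion is negated. The key values below are hand-written for illustration, not computed by pip:

import math

# (not requires_python, not direct, not pinned, not backtrack_cause,
#  inferred_depth, requested_order, not unfree, identifier)
keys = {
    "<Python from Requires-Python>": (False, True, True, True, 1.0, math.inf, True, "<Python from Requires-Python>"),
    "requests": (True, True, False, True, 1.0, 0, False, "requests"),  # pinned, user-requested first
    "idna": (True, True, True, True, 2.0, math.inf, False, "idna"),    # a transitive dependency
}

for identifier in sorted(keys, key=keys.get):
    print(identifier)
# <Python from Requires-Python>
# requests
# idna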


@@ -0,0 +1,80 @@
from collections import defaultdict
from logging import getLogger
from typing import Any, DefaultDict
from pip._vendor.resolvelib.reporters import BaseReporter
from .base import Candidate, Requirement
logger = getLogger(__name__)
class PipReporter(BaseReporter):
def __init__(self) -> None:
self.reject_count_by_package: DefaultDict[str, int] = defaultdict(int)
self._messages_at_reject_count = {
1: (
"pip is looking at multiple versions of {package_name} to "
"determine which version is compatible with other "
"requirements. This could take a while."
),
8: (
"pip is still looking at multiple versions of {package_name} to "
"determine which version is compatible with other "
"requirements. This could take a while."
),
13: (
"This is taking longer than usual. You might need to provide "
"the dependency resolver with stricter constraints to reduce "
"runtime. See https://pip.pypa.io/warnings/backtracking for "
"guidance. If you want to abort this run, press Ctrl + C."
),
}
def rejecting_candidate(self, criterion: Any, candidate: Candidate) -> None:
self.reject_count_by_package[candidate.name] += 1
count = self.reject_count_by_package[candidate.name]
if count not in self._messages_at_reject_count:
return
message = self._messages_at_reject_count[count]
logger.info("INFO: %s", message.format(package_name=candidate.name))
msg = "Will try a different candidate, due to conflict:"
for req_info in criterion.information:
req, parent = req_info.requirement, req_info.parent
# Inspired by Factory.get_installation_error
msg += "\n "
if parent:
msg += f"{parent.name} {parent.version} depends on "
else:
msg += "The user requested "
msg += req.format_for_error()
logger.debug(msg)

class PipDebuggingReporter(BaseReporter):
    """A reporter that does an info log for every event it sees."""

    def starting(self) -> None:
        logger.info("Reporter.starting()")

    def starting_round(self, index: int) -> None:
        logger.info("Reporter.starting_round(%r)", index)

    def ending_round(self, index: int, state: Any) -> None:
        logger.info("Reporter.ending_round(%r, state)", index)

    def ending(self, state: Any) -> None:
        logger.info("Reporter.ending(%r)", state)

    def adding_requirement(self, requirement: Requirement, parent: Candidate) -> None:
        logger.info("Reporter.adding_requirement(%r, %r)", requirement, parent)

    def rejecting_candidate(self, criterion: Any, candidate: Candidate) -> None:
        logger.info("Reporter.rejecting_candidate(%r, %r)", criterion, candidate)

    def pinning(self, candidate: Candidate) -> None:
        logger.info("Reporter.pinning(%r)", candidate)

View file

@ -0,0 +1,165 @@
from pip._vendor.packaging.specifiers import SpecifierSet
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name

from pip._internal.req.req_install import InstallRequirement

from .base import Candidate, CandidateLookup, Requirement, format_name


class ExplicitRequirement(Requirement):
    def __init__(self, candidate: Candidate) -> None:
        self.candidate = candidate

    def __str__(self) -> str:
        return str(self.candidate)

    def __repr__(self) -> str:
        return "{class_name}({candidate!r})".format(
            class_name=self.__class__.__name__,
            candidate=self.candidate,
        )

    @property
    def project_name(self) -> NormalizedName:
        # No need to canonicalize - the candidate did this
        return self.candidate.project_name

    @property
    def name(self) -> str:
        # No need to canonicalize - the candidate did this
        return self.candidate.name

    def format_for_error(self) -> str:
        return self.candidate.format_for_error()

    def get_candidate_lookup(self) -> CandidateLookup:
        return self.candidate, None

    def is_satisfied_by(self, candidate: Candidate) -> bool:
        return candidate == self.candidate


class SpecifierRequirement(Requirement):
    def __init__(self, ireq: InstallRequirement) -> None:
        assert ireq.link is None, "This is a link, not a specifier"
        self._ireq = ireq
        self._extras = frozenset(ireq.extras)

    def __str__(self) -> str:
        return str(self._ireq.req)

    def __repr__(self) -> str:
        return "{class_name}({requirement!r})".format(
            class_name=self.__class__.__name__,
            requirement=str(self._ireq.req),
        )

    @property
    def project_name(self) -> NormalizedName:
        assert self._ireq.req, "Specifier-backed ireq is always PEP 508"
        return canonicalize_name(self._ireq.req.name)

    @property
    def name(self) -> str:
        return format_name(self.project_name, self._extras)

    def format_for_error(self) -> str:
        # Convert comma-separated specifiers into "A, B, ..., F and G"
        # This makes the specifier a bit more "human readable", without
        # risking a change in meaning. (Hopefully! Not all edge cases have
        # been checked)
        parts = [s.strip() for s in str(self).split(",")]
        if len(parts) == 0:
            return ""
        elif len(parts) == 1:
            return parts[0]

        return ", ".join(parts[:-1]) + " and " + parts[-1]
    def get_candidate_lookup(self) -> CandidateLookup:
        return None, self._ireq

    def is_satisfied_by(self, candidate: Candidate) -> bool:
        assert candidate.name == self.name, (
            f"Internal issue: Candidate is not for this requirement "
            f"{candidate.name} vs {self.name}"
        )
        # We can safely always allow prereleases here since PackageFinder
        # already implements the prerelease logic, and would have filtered out
        # prerelease candidates if the user does not expect them.
        assert self._ireq.req, "Specifier-backed ireq is always PEP 508"
        spec = self._ireq.req.specifier
        return spec.contains(candidate.version, prereleases=True)


class RequiresPythonRequirement(Requirement):
    """A requirement representing Requires-Python metadata."""

    def __init__(self, specifier: SpecifierSet, match: Candidate) -> None:
        self.specifier = specifier
        self._candidate = match

    def __str__(self) -> str:
        return f"Python {self.specifier}"

    def __repr__(self) -> str:
        return "{class_name}({specifier!r})".format(
            class_name=self.__class__.__name__,
            specifier=str(self.specifier),
        )

    @property
    def project_name(self) -> NormalizedName:
        return self._candidate.project_name

    @property
    def name(self) -> str:
        return self._candidate.name

    def format_for_error(self) -> str:
        return str(self)

    def get_candidate_lookup(self) -> CandidateLookup:
        if self.specifier.contains(self._candidate.version, prereleases=True):
            return self._candidate, None
        return None, None

    def is_satisfied_by(self, candidate: Candidate) -> bool:
        assert candidate.name == self._candidate.name, "Not Python candidate"
        # We can safely always allow prereleases here since PackageFinder
        # already implements the prerelease logic, and would have filtered out
        # prerelease candidates if the user does not expect them.
        return self.specifier.contains(candidate.version, prereleases=True)
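
Editor's note: the version check itself is plain `packaging` behaviour. A small sketch of how a Requires-Python specifier accepts or rejects interpreter versions (the specifier and version strings are examples only):

from pip._vendor.packaging.specifiers import SpecifierSet

requires_python = SpecifierSet(">=3.8,<3.13")

assert requires_python.contains("3.11.4", prereleases=True)
assert not requires_python.contains("3.7.9", prereleases=True)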

class UnsatisfiableRequirement(Requirement):
    """A requirement that cannot be satisfied."""

    def __init__(self, name: NormalizedName) -> None:
        self._name = name

    def __str__(self) -> str:
        return f"{self._name} (unavailable)"

    def __repr__(self) -> str:
        return "{class_name}({name!r})".format(
            class_name=self.__class__.__name__,
            name=str(self._name),
        )

    @property
    def project_name(self) -> NormalizedName:
        return self._name

    @property
    def name(self) -> str:
        return self._name

    def format_for_error(self) -> str:
        return str(self)

    def get_candidate_lookup(self) -> CandidateLookup:
        return None, None

    def is_satisfied_by(self, candidate: Candidate) -> bool:
        return False

View file

@ -0,0 +1,299 @@
import functools
import logging
import os
from typing import TYPE_CHECKING, Dict, List, Optional, Set, Tuple, cast

from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.resolvelib import BaseReporter, ResolutionImpossible
from pip._vendor.resolvelib import Resolver as RLResolver
from pip._vendor.resolvelib.structs import DirectedGraph

from pip._internal.cache import WheelCache
from pip._internal.index.package_finder import PackageFinder
from pip._internal.operations.prepare import RequirementPreparer
from pip._internal.req.req_install import InstallRequirement
from pip._internal.req.req_set import RequirementSet
from pip._internal.resolution.base import BaseResolver, InstallRequirementProvider
from pip._internal.resolution.resolvelib.provider import PipProvider
from pip._internal.resolution.resolvelib.reporter import (
    PipDebuggingReporter,
    PipReporter,
)

from .base import Candidate, Requirement
from .factory import Factory

if TYPE_CHECKING:
    from pip._vendor.resolvelib.resolvers import Result as RLResult

    Result = RLResult[Requirement, Candidate, str]

logger = logging.getLogger(__name__)


class Resolver(BaseResolver):
    _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"}

    def __init__(
        self,
        preparer: RequirementPreparer,
        finder: PackageFinder,
        wheel_cache: Optional[WheelCache],
        make_install_req: InstallRequirementProvider,
        use_user_site: bool,
        ignore_dependencies: bool,
        ignore_installed: bool,
        ignore_requires_python: bool,
        force_reinstall: bool,
        upgrade_strategy: str,
        py_version_info: Optional[Tuple[int, ...]] = None,
    ):
        super().__init__()
        assert upgrade_strategy in self._allowed_strategies

        self.factory = Factory(
            finder=finder,
            preparer=preparer,
            make_install_req=make_install_req,
            wheel_cache=wheel_cache,
            use_user_site=use_user_site,
            force_reinstall=force_reinstall,
            ignore_installed=ignore_installed,
            ignore_requires_python=ignore_requires_python,
            py_version_info=py_version_info,
        )
        self.ignore_dependencies = ignore_dependencies
        self.upgrade_strategy = upgrade_strategy
        self._result: Optional[Result] = None
    def resolve(
        self, root_reqs: List[InstallRequirement], check_supported_wheels: bool
    ) -> RequirementSet:
        collected = self.factory.collect_root_requirements(root_reqs)
        provider = PipProvider(
            factory=self.factory,
            constraints=collected.constraints,
            ignore_dependencies=self.ignore_dependencies,
            upgrade_strategy=self.upgrade_strategy,
            user_requested=collected.user_requested,
        )
        if "PIP_RESOLVER_DEBUG" in os.environ:
            reporter: BaseReporter = PipDebuggingReporter()
        else:
            reporter = PipReporter()
        resolver: RLResolver[Requirement, Candidate, str] = RLResolver(
            provider,
            reporter,
        )

        try:
            limit_how_complex_resolution_can_be = 200000
            result = self._result = resolver.resolve(
                collected.requirements, max_rounds=limit_how_complex_resolution_can_be
            )
        except ResolutionImpossible as e:
            error = self.factory.get_installation_error(
                cast("ResolutionImpossible[Requirement, Candidate]", e),
                collected.constraints,
            )
            raise error from e

        req_set = RequirementSet(check_supported_wheels=check_supported_wheels)
        for candidate in result.mapping.values():
            ireq = candidate.get_install_requirement()
            if ireq is None:
                continue

            # Check if there is already an installation under the same name,
            # and set a flag for later stages to uninstall it, if needed.
            installed_dist = self.factory.get_dist_to_uninstall(candidate)
            if installed_dist is None:
                # There is no existing installation -- nothing to uninstall.
                ireq.should_reinstall = False
            elif self.factory.force_reinstall:
                # The --force-reinstall flag is set -- reinstall.
                ireq.should_reinstall = True
            elif installed_dist.version != candidate.version:
                # The installation is different in version -- reinstall.
                ireq.should_reinstall = True
            elif candidate.is_editable or installed_dist.editable:
                # The incoming distribution is editable, or different in
                # editable-ness to installation -- reinstall.
                ireq.should_reinstall = True
            elif candidate.source_link and candidate.source_link.is_file:
                # The incoming distribution is under file://
                if candidate.source_link.is_wheel:
                    # is a local wheel -- do nothing.
                    logger.info(
                        "%s is already installed with the same version as the "
                        "provided wheel. Use --force-reinstall to force an "
                        "installation of the wheel.",
                        ireq.name,
                    )
                    continue

                # is a local sdist or path -- reinstall
                ireq.should_reinstall = True
            else:
                continue

            link = candidate.source_link
            if link and link.is_yanked:
                # The reason for yanking can contain non-ASCII characters, so
                # keep it as text rather than encoding it.
                msg = (
                    "The candidate selected for download or install is a "
                    "yanked version: {name!r} candidate (version {version} "
                    "at {link})\nReason for being yanked: {reason}"
                ).format(
                    name=candidate.name,
                    version=candidate.version,
                    link=link,
                    reason=link.yanked_reason or "<none given>",
                )
                logger.warning(msg)

            req_set.add_named_requirement(ireq)

        reqs = req_set.all_requirements
        self.factory.preparer.prepare_linked_requirements_more(reqs)
        for req in reqs:
            req.prepared = True
            req.needs_more_preparation = False
        return req_set
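
Editor's note: `resolve()` above wires a `PipProvider` and a reporter into the vendored resolvelib `Resolver`. Stripped of all pip specifics, the shape of that interaction looks roughly like the self-contained sketch below against a tiny in-memory "index". The toy index, the tuple-based requirement/candidate model and all names are invented; only the provider/reporter/resolver interface comes from resolvelib, and real pip uses `PipProvider` backed by `Factory` instead.

# Toy sketch of the resolvelib contract used above; NOT how pip models packages.
from pip._vendor.resolvelib import AbstractProvider, BaseReporter
from pip._vendor.resolvelib import Resolver as ToyResolver

# requirement: ("name", {allowed versions}); candidate: ("name", version)
INDEX = {
    "spam": {1: [("eggs", {1, 2})], 2: [("eggs", {2})]},
    "eggs": {1: [], 2: []},
}


class ToyProvider(AbstractProvider):
    def identify(self, requirement_or_candidate):
        return requirement_or_candidate[0]

    def get_preferences(
        self, identifier, resolutions, candidates, information, backtrack_causes
    ):
        return identifier  # no clever ordering in the toy

    def find_matches(self, identifier, requirements, incompatibilities):
        needed = [allowed for _, allowed in requirements[identifier]]
        bad_versions = {version for _, version in incompatibilities[identifier]}
        matches = []
        for version in sorted(INDEX[identifier], reverse=True):  # prefer newest
            if version in bad_versions:
                continue
            if all(version in allowed for allowed in needed):
                matches.append((identifier, version))
        return matches

    def is_satisfied_by(self, requirement, candidate):
        return candidate[1] in requirement[1]

    def get_dependencies(self, candidate):
        return INDEX[candidate[0]][candidate[1]]


result = ToyResolver(ToyProvider(), BaseReporter()).resolve([("spam", {1, 2})])
print(result.mapping)  # roughly: {'spam': ('spam', 2), 'eggs': ('eggs', 2)}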
    def get_installation_order(
        self, req_set: RequirementSet
    ) -> List[InstallRequirement]:
        """Get order for installation of requirements in RequirementSet.

        The returned list contains a requirement before others that depend on
        it. This helps ensure that the environment is kept consistent as they
        get installed one-by-one.

        The current implementation creates a topological ordering of the
        dependency graph, giving more weight to packages with fewer
        or no dependencies, while breaking any cycles in the graph at
        arbitrary points. We make no guarantees about where the cycle
        would be broken, other than it *would* be broken.
        """
        assert self._result is not None, "must call resolve() first"

        if not req_set.requirements:
            # Nothing is left to install, so we do not need an order.
            return []

        graph = self._result.graph
        weights = get_topological_weights(graph, set(req_set.requirements.keys()))

        sorted_items = sorted(
            req_set.requirements.items(),
            key=functools.partial(_req_set_item_sorter, weights=weights),
            reverse=True,
        )
        return [ireq for _, ireq in sorted_items]


def get_topological_weights(
    graph: "DirectedGraph[Optional[str]]", requirement_keys: Set[str]
) -> Dict[Optional[str], int]:
    """Assign weights to each node based on how "deep" they are.

    This implementation may change at any point in the future without prior
    notice.

    We first simplify the dependency graph by pruning any leaves and giving them
    the highest weight: a package without any dependencies should be installed
    first. This is done again and again in the same way, giving ever less weight
    to the newly found leaves. The loop stops when no leaves are left: all
    remaining packages have at least one dependency left in the graph.

    Then we continue with the remaining graph, by taking the length of the
    longest path to any node from root, ignoring any paths that contain a single
    node twice (i.e. cycles). This is done through a depth-first search through
    the graph, while keeping track of the path to the node.

    Cycles in the graph would result in a node being revisited while it is also
    on its own path. In this case, take no action. This helps ensure we
    don't get stuck in a cycle.

    When assigning weight, the longer path (i.e. larger length) is preferred.

    We are only interested in the weights of packages that are in the
    requirement_keys.
    """
    path: Set[Optional[str]] = set()
    weights: Dict[Optional[str], int] = {}

    def visit(node: Optional[str]) -> None:
        if node in path:
            # We hit a cycle, so we'll break it here.
            return

        # Time to visit the children!
        path.add(node)
        for child in graph.iter_children(node):
            visit(child)
        path.remove(node)

        if node not in requirement_keys:
            return

        last_known_parent_count = weights.get(node, 0)
        weights[node] = max(last_known_parent_count, len(path))

    # Simplify the graph, pruning leaves that have no dependencies.
    # This is needed for large graphs (say over 200 packages) because otherwise
    # the `visit` function is exponentially slower, taking minutes.
    # See https://github.com/pypa/pip/issues/10557
    # We will loop until we explicitly break the loop.
    while True:
        leaves = set()
        for key in graph:
            if key is None:
                continue
            for _child in graph.iter_children(key):
                # This means we have at least one child
                break
            else:
                # No child.
                leaves.add(key)
        if not leaves:
            # We are done simplifying.
            break
        # Calculate the weight for the leaves.
        weight = len(graph) - 1
        for leaf in leaves:
            if leaf not in requirement_keys:
                continue
            weights[leaf] = weight
        # Remove the leaves from the graph, making it simpler.
        for leaf in leaves:
            graph.remove(leaf)

    # Visit the remaining graph.
    # `None` is guaranteed to be the root node by resolvelib.
    visit(None)

    # Sanity check: all requirement keys should be in the weights,
    # and no other keys should be in the weights.
    difference = set(weights.keys()).difference(requirement_keys)
    assert not difference, difference

    return weights
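
Editor's note: a tiny worked example helps here. Using resolvelib's `DirectedGraph` (its `add`/`connect` helpers are assumed to be available as in the vendored copy), with toy data where the root `None` requests "a" and "b", and both depend on "c":

# Worked example with invented package names; note that the helper prunes
# (mutates) the graph it is given.
example = DirectedGraph()
for node in (None, "a", "b", "c"):
    example.add(node)
example.connect(None, "a")
example.connect(None, "b")
example.connect("a", "c")
example.connect("b", "c")

weights = get_topological_weights(example, {"a", "b", "c"})
# "c" has no dependencies, so it is pruned first and gets the largest weight;
# the reverse sort in get_installation_order() then installs "c" before "a"/"b".
assert weights == {"c": 3, "a": 2, "b": 2}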

def _req_set_item_sorter(
    item: Tuple[str, InstallRequirement],
    weights: Dict[Optional[str], int],
) -> Tuple[int, str]:
    """Key function used to sort install requirements for installation.

    Based on the "weight" mapping calculated in ``get_installation_order()``.
    The canonical package name is returned as the second member as a tie-
    breaker to ensure the result is predictable, which is useful in tests.
    """
    name = canonicalize_name(item[0])
    return weights[name], name
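
Editor's note: tying the two helpers together, the final ordering is just a reverse sort on (weight, canonical name). A sketch with made-up weights and string stand-ins for the InstallRequirement objects (the key function only looks at the name, so the stand-ins are enough):

# Illustrative only: the values are placeholders, not real InstallRequirements.
fake_req_set = {"a": "<ireq a>", "b": "<ireq b>", "c": "<ireq c>"}
fake_weights = {"c": 3, "a": 2, "b": 2}

ordered = sorted(
    fake_req_set.items(),
    key=functools.partial(_req_set_item_sorter, weights=fake_weights),
    reverse=True,
)
# Highest weight first; equal weights fall back to the name tie-breaker.
assert [name for name, _ in ordered] == ["c", "b", "a"]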