tests attempt 2
This commit is contained in:
parent fdf385fe06
commit c88f7df83a

2363 changed files with 408191 additions and 0 deletions
28 binary files not shown.
109
venv/lib/python3.11/site-packages/pip/_internal/utils/_jaraco_text.py
Normal file
@@ -0,0 +1,109 @@
"""Functions brought over from jaraco.text.

These functions are not supposed to be used within `pip._internal`. These are
helper functions brought over from `jaraco.text` to enable vendoring newer
copies of `pkg_resources` without having to vendor `jaraco.text` and its entire
dependency cone; something that our vendoring setup is not currently capable of
handling.

License reproduced from original source below:

Copyright Jason R. Coombs

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
"""

import functools
import itertools


def _nonblank(str):
    return str and not str.startswith("#")


@functools.singledispatch
def yield_lines(iterable):
    r"""
    Yield valid lines of a string or iterable.

    >>> list(yield_lines(''))
    []
    >>> list(yield_lines(['foo', 'bar']))
    ['foo', 'bar']
    >>> list(yield_lines('foo\nbar'))
    ['foo', 'bar']
    >>> list(yield_lines('\nfoo\n#bar\nbaz #comment'))
    ['foo', 'baz #comment']
    >>> list(yield_lines(['foo\nbar', 'baz', 'bing\n\n\n']))
    ['foo', 'bar', 'baz', 'bing']
    """
    return itertools.chain.from_iterable(map(yield_lines, iterable))


@yield_lines.register(str)
def _(text):
    return filter(_nonblank, map(str.strip, text.splitlines()))


def drop_comment(line):
    """
    Drop comments.

    >>> drop_comment('foo # bar')
    'foo'

    A hash without a space may be in a URL.

    >>> drop_comment('http://example.com/foo#bar')
    'http://example.com/foo#bar'
    """
    return line.partition(" #")[0]


def join_continuation(lines):
    r"""
    Join lines continued by a trailing backslash.

    >>> list(join_continuation(['foo \\', 'bar', 'baz']))
    ['foobar', 'baz']
    >>> list(join_continuation(['foo \\', 'bar', 'baz']))
    ['foobar', 'baz']
    >>> list(join_continuation(['foo \\', 'bar \\', 'baz']))
    ['foobarbaz']

    Not sure why, but...
    The character preceding the backslash is also elided.

    >>> list(join_continuation(['goo\\', 'dly']))
    ['godly']

    A terrible idea, but...
    If no line is available to continue, suppress the lines.

    >>> list(join_continuation(['foo', 'bar\\', 'baz\\']))
    ['foo']
    """
    lines = iter(lines)
    for item in lines:
        while item.endswith("\\"):
            try:
                item = item[:-2].strip() + next(lines)
            except StopIteration:
                return
        yield item
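For orientation: these three helpers compose into a small preprocessing pipeline for requirements-style text. A minimal usage sketch (not part of the committed file; the sample input is illustrative):

from pip._internal.utils._jaraco_text import (
    drop_comment,
    join_continuation,
    yield_lines,
)

# strip blank and comment-only lines, trim whitespace, drop trailing
# " #" comments, then join backslash-continued lines
lines = yield_lines("foo \\\nbar\n# a comment\nbaz")
print(list(join_continuation(map(drop_comment, lines))))  # ['foobar', 'baz']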
38
venv/lib/python3.11/site-packages/pip/_internal/utils/_log.py
Normal file
@@ -0,0 +1,38 @@
"""Customize logging

Defines custom logger class for the `logger.verbose(...)` method.

init_logging() must be called before any other modules that call logging.getLogger.
"""

import logging
from typing import Any, cast

# custom log level for `--verbose` output
# between DEBUG and INFO
VERBOSE = 15


class VerboseLogger(logging.Logger):
    """Custom Logger, defining a verbose log-level

    VERBOSE is between INFO and DEBUG.
    """

    def verbose(self, msg: str, *args: Any, **kwargs: Any) -> None:
        return self.log(VERBOSE, msg, *args, **kwargs)


def getLogger(name: str) -> VerboseLogger:
    """logging.getLogger, but ensures our VerboseLogger class is returned"""
    return cast(VerboseLogger, logging.getLogger(name))


def init_logging() -> None:
    """Register our VerboseLogger and VERBOSE log level.

    Should be called before any calls to getLogger(),
    i.e. in pip._internal.__init__
    """
    logging.setLoggerClass(VerboseLogger)
    logging.addLevelName(VERBOSE, "VERBOSE")
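A minimal sketch of the intended call order (run where this vendored pip is importable; the logger name is illustrative):

import logging

from pip._internal.utils._log import VERBOSE, getLogger, init_logging

init_logging()  # must run before any logging.getLogger() call
logging.basicConfig(level=VERBOSE)
logger = getLogger("pip.example")        # hypothetical logger name
logger.verbose("shown at -v verbosity")  # emitted at custom level 15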
52
venv/lib/python3.11/site-packages/pip/_internal/utils/appdirs.py
Normal file
@@ -0,0 +1,52 @@
"""
This code wraps the vendored appdirs module so that the return values are
compatible with the current pip code base.

The intention is to rewrite current usages gradually, keeping the tests
passing, and eventually drop this after all usages are changed.
"""

import os
import sys
from typing import List

from pip._vendor import platformdirs as _appdirs


def user_cache_dir(appname: str) -> str:
    return _appdirs.user_cache_dir(appname, appauthor=False)


def _macos_user_config_dir(appname: str, roaming: bool = True) -> str:
    # Use ~/Application Support/pip, if the directory exists.
    path = _appdirs.user_data_dir(appname, appauthor=False, roaming=roaming)
    if os.path.isdir(path):
        return path

    # Use a Linux-like ~/.config/pip, by default.
    linux_like_path = "~/.config/"
    if appname:
        linux_like_path = os.path.join(linux_like_path, appname)

    return os.path.expanduser(linux_like_path)


def user_config_dir(appname: str, roaming: bool = True) -> str:
    if sys.platform == "darwin":
        return _macos_user_config_dir(appname, roaming)

    return _appdirs.user_config_dir(appname, appauthor=False, roaming=roaming)


# for the discussion regarding site_config_dir locations
# see <https://github.com/pypa/pip/issues/1733>
def site_config_dirs(appname: str) -> List[str]:
    if sys.platform == "darwin":
        return [_appdirs.site_data_dir(appname, appauthor=False, multipath=True)]

    dirval = _appdirs.site_config_dir(appname, appauthor=False, multipath=True)
    if sys.platform == "win32":
        return [dirval]

    # Unix-y system. Look in /etc as well.
    return dirval.split(os.pathsep) + ["/etc"]
63
venv/lib/python3.11/site-packages/pip/_internal/utils/compat.py
Normal file
@@ -0,0 +1,63 @@
"""Stuff that differs in different Python versions and platform
distributions."""

import logging
import os
import sys

__all__ = ["get_path_uid", "stdlib_pkgs", "WINDOWS"]


logger = logging.getLogger(__name__)


def has_tls() -> bool:
    try:
        import _ssl  # noqa: F401  # ignore unused

        return True
    except ImportError:
        pass

    from pip._vendor.urllib3.util import IS_PYOPENSSL

    return IS_PYOPENSSL


def get_path_uid(path: str) -> int:
    """
    Return path's uid.

    Does not follow symlinks:
        https://github.com/pypa/pip/pull/935#discussion_r5307003

    Placed this function in compat due to differences on AIX and
    Jython, that should eventually go away.

    :raises OSError: When path is a symlink or can't be read.
    """
    if hasattr(os, "O_NOFOLLOW"):
        fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW)
        file_uid = os.fstat(fd).st_uid
        os.close(fd)
    else:  # AIX and Jython
        # WARNING: time of check vulnerability, but best we can do w/o NOFOLLOW
        if not os.path.islink(path):
            # older versions of Jython don't have `os.fstat`
            file_uid = os.stat(path).st_uid
        else:
            # raise OSError for parity with os.O_NOFOLLOW above
            raise OSError(f"{path} is a symlink; Will not return uid for symlinks")
    return file_uid


# packages in the stdlib that may have installation metadata, but should not be
# considered 'installed'. this theoretically could be determined based on
# dist.location (py27:`sysconfig.get_paths()['stdlib']`,
# py26:sysconfig.get_config_vars('LIBDEST')), but fear platform variation may
# make this ineffective, so hard-coding
stdlib_pkgs = {"python", "wsgiref", "argparse"}


# windows detection, covers cpython and ironpython
WINDOWS = sys.platform.startswith("win") or (sys.platform == "cli" and os.name == "nt")
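A short sketch of get_path_uid's contract on POSIX (the path is illustrative; a symlinked path raises OSError instead of being followed):

import os

from pip._internal.utils.compat import WINDOWS, get_path_uid

if not WINDOWS:
    uid = get_path_uid("/tmp")   # uid owning the directory itself
    print(uid == os.geteuid())   # True only when the current user owns it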
165
venv/lib/python3.11/site-packages/pip/_internal/utils/compatibility_tags.py
Normal file
@@ -0,0 +1,165 @@
"""Generate and work with PEP 425 Compatibility Tags.
"""

import re
from typing import List, Optional, Tuple

from pip._vendor.packaging.tags import (
    PythonVersion,
    Tag,
    compatible_tags,
    cpython_tags,
    generic_tags,
    interpreter_name,
    interpreter_version,
    mac_platforms,
)

_osx_arch_pat = re.compile(r"(.+)_(\d+)_(\d+)_(.+)")


def version_info_to_nodot(version_info: Tuple[int, ...]) -> str:
    # Only use up to the first two numbers.
    return "".join(map(str, version_info[:2]))


def _mac_platforms(arch: str) -> List[str]:
    match = _osx_arch_pat.match(arch)
    if match:
        name, major, minor, actual_arch = match.groups()
        mac_version = (int(major), int(minor))
        arches = [
            # Since we have always only checked that the platform starts
            # with "macosx", for backwards-compatibility we extract the
            # actual prefix provided by the user in case they provided
            # something like "macosxcustom_". It may be good to remove
            # this as undocumented or deprecate it in the future.
            "{}_{}".format(name, arch[len("macosx_") :])
            for arch in mac_platforms(mac_version, actual_arch)
        ]
    else:
        # arch pattern didn't match (?!)
        arches = [arch]
    return arches


def _custom_manylinux_platforms(arch: str) -> List[str]:
    arches = [arch]
    arch_prefix, arch_sep, arch_suffix = arch.partition("_")
    if arch_prefix == "manylinux2014":
        # manylinux1/manylinux2010 wheels run on most manylinux2014 systems
        # with the exception of wheels depending on ncurses. PEP 599 states
        # manylinux1/manylinux2010 wheels should be considered
        # manylinux2014 wheels:
        # https://www.python.org/dev/peps/pep-0599/#backwards-compatibility-with-manylinux2010-wheels
        if arch_suffix in {"i686", "x86_64"}:
            arches.append("manylinux2010" + arch_sep + arch_suffix)
            arches.append("manylinux1" + arch_sep + arch_suffix)
    elif arch_prefix == "manylinux2010":
        # manylinux1 wheels run on most manylinux2010 systems with the
        # exception of wheels depending on ncurses. PEP 571 states
        # manylinux1 wheels should be considered manylinux2010 wheels:
        # https://www.python.org/dev/peps/pep-0571/#backwards-compatibility-with-manylinux1-wheels
        arches.append("manylinux1" + arch_sep + arch_suffix)
    return arches


def _get_custom_platforms(arch: str) -> List[str]:
    arch_prefix, arch_sep, arch_suffix = arch.partition("_")
    if arch.startswith("macosx"):
        arches = _mac_platforms(arch)
    elif arch_prefix in ["manylinux2014", "manylinux2010"]:
        arches = _custom_manylinux_platforms(arch)
    else:
        arches = [arch]
    return arches


def _expand_allowed_platforms(platforms: Optional[List[str]]) -> Optional[List[str]]:
    if not platforms:
        return None

    seen = set()
    result = []

    for p in platforms:
        if p in seen:
            continue
        additions = [c for c in _get_custom_platforms(p) if c not in seen]
        seen.update(additions)
        result.extend(additions)

    return result


def _get_python_version(version: str) -> PythonVersion:
    if len(version) > 1:
        return int(version[0]), int(version[1:])
    else:
        return (int(version[0]),)


def _get_custom_interpreter(
    implementation: Optional[str] = None, version: Optional[str] = None
) -> str:
    if implementation is None:
        implementation = interpreter_name()
    if version is None:
        version = interpreter_version()
    return f"{implementation}{version}"


def get_supported(
    version: Optional[str] = None,
    platforms: Optional[List[str]] = None,
    impl: Optional[str] = None,
    abis: Optional[List[str]] = None,
) -> List[Tag]:
    """Return a list of supported tags for the version specified in
    `version`.

    :param version: a string version, of the form "33" or "32",
        or None. The version will be assumed to support our ABI.
    :param platforms: specify a list of platforms you want valid
        tags for, or None. If None, use the local system platform.
    :param impl: specify the exact implementation you want valid
        tags for, or None. If None, use the local interpreter impl.
    :param abis: specify a list of abis you want valid
        tags for, or None. If None, use the local interpreter abi.
    """
    supported: List[Tag] = []

    python_version: Optional[PythonVersion] = None
    if version is not None:
        python_version = _get_python_version(version)

    interpreter = _get_custom_interpreter(impl, version)

    platforms = _expand_allowed_platforms(platforms)

    is_cpython = (impl or interpreter_name()) == "cp"
    if is_cpython:
        supported.extend(
            cpython_tags(
                python_version=python_version,
                abis=abis,
                platforms=platforms,
            )
        )
    else:
        supported.extend(
            generic_tags(
                interpreter=interpreter,
                abis=abis,
                platforms=platforms,
            )
        )
    supported.extend(
        compatible_tags(
            python_version=python_version,
            interpreter=interpreter,
            platforms=platforms,
        )
    )

    return supported
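For context, a sketch of how the resulting tag list is typically inspected (arguments mirror the signature above; the printed tag is an example, not a guaranteed output):

from pip._internal.utils.compatibility_tags import get_supported

# Tags for the running interpreter, most specific first
for tag in get_supported()[:3]:
    print(tag)  # e.g. cp311-cp311-manylinux_2_17_x86_64 on CPython 3.11/Linux

# Tags for a hypothetical target: CPython 3.10 on manylinux2014 x86_64
tags = get_supported(version="310", impl="cp", platforms=["manylinux2014_x86_64"])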
11
venv/lib/python3.11/site-packages/pip/_internal/utils/datetime.py
Normal file
@@ -0,0 +1,11 @@
"""For when pip wants to check the date or time.
"""

import datetime


def today_is_later_than(year: int, month: int, day: int) -> bool:
    today = datetime.date.today()
    given = datetime.date(year, month, day)

    return today > given
120
venv/lib/python3.11/site-packages/pip/_internal/utils/deprecation.py
Normal file
@@ -0,0 +1,120 @@
"""
A module that implements tooling to enable easy warnings about deprecations.
"""

import logging
import warnings
from typing import Any, Optional, TextIO, Type, Union

from pip._vendor.packaging.version import parse

from pip import __version__ as current_version  # NOTE: tests patch this name.

DEPRECATION_MSG_PREFIX = "DEPRECATION: "


class PipDeprecationWarning(Warning):
    pass


_original_showwarning: Any = None


# Warnings <-> Logging Integration
def _showwarning(
    message: Union[Warning, str],
    category: Type[Warning],
    filename: str,
    lineno: int,
    file: Optional[TextIO] = None,
    line: Optional[str] = None,
) -> None:
    if file is not None:
        if _original_showwarning is not None:
            _original_showwarning(message, category, filename, lineno, file, line)
    elif issubclass(category, PipDeprecationWarning):
        # We use a specially named logger which will handle all of the
        # deprecation messages for pip.
        logger = logging.getLogger("pip._internal.deprecations")
        logger.warning(message)
    else:
        _original_showwarning(message, category, filename, lineno, file, line)


def install_warning_logger() -> None:
    # Enable our Deprecation Warnings
    warnings.simplefilter("default", PipDeprecationWarning, append=True)

    global _original_showwarning

    if _original_showwarning is None:
        _original_showwarning = warnings.showwarning
        warnings.showwarning = _showwarning


def deprecated(
    *,
    reason: str,
    replacement: Optional[str],
    gone_in: Optional[str],
    feature_flag: Optional[str] = None,
    issue: Optional[int] = None,
) -> None:
    """Helper to deprecate existing functionality.

    reason:
        Textual reason shown to the user about why this functionality has
        been deprecated. Should be a complete sentence.
    replacement:
        Textual suggestion shown to the user about what alternative
        functionality they can use.
    gone_in:
        The version of pip in which this functionality should be removed.
        Raises an error if pip's current version is greater than or equal to
        this.
    feature_flag:
        Command-line flag of the form --use-feature={feature_flag} for testing
        upcoming functionality.
    issue:
        Issue number on the tracker that would serve as a useful place for
        users to find related discussion and provide feedback.
    """

    # Determine whether or not the feature is already gone in this version.
    is_gone = gone_in is not None and parse(current_version) >= parse(gone_in)

    message_parts = [
        (reason, f"{DEPRECATION_MSG_PREFIX}{{}}"),
        (
            gone_in,
            "pip {} will enforce this behaviour change."
            if not is_gone
            else "Since pip {}, this is no longer supported.",
        ),
        (
            replacement,
            "A possible replacement is {}.",
        ),
        (
            feature_flag,
            "You can use the flag --use-feature={} to test the upcoming behaviour."
            if not is_gone
            else None,
        ),
        (
            issue,
            "Discussion can be found at https://github.com/pypa/pip/issues/{}",
        ),
    ]

    message = " ".join(
        format_str.format(value)
        for value, format_str in message_parts
        if format_str is not None and value is not None
    )

    # Raise as an error if this behaviour is deprecated.
    if is_gone:
        raise PipDeprecationWarning(message)

    warnings.warn(message, category=PipDeprecationWarning, stacklevel=2)
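A sketch of how a caller emits one of these warnings (the flag names and version are illustrative, not real pip options):

from pip._internal.utils.deprecation import (
    PipDeprecationWarning,
    deprecated,
    install_warning_logger,
)

install_warning_logger()  # route PipDeprecationWarning through logging
try:
    deprecated(
        reason="The --example-flag option is deprecated.",  # hypothetical flag
        replacement="--new-example-flag",                    # hypothetical
        gone_in="99.0",  # far-future version: warns now, raises once reached
        issue=None,
    )
except PipDeprecationWarning:
    pass  # only raised when pip's version >= gone_in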
87
venv/lib/python3.11/site-packages/pip/_internal/utils/direct_url_helpers.py
Normal file
@@ -0,0 +1,87 @@
from typing import Optional

from pip._internal.models.direct_url import ArchiveInfo, DirectUrl, DirInfo, VcsInfo
from pip._internal.models.link import Link
from pip._internal.utils.urls import path_to_url
from pip._internal.vcs import vcs


def direct_url_as_pep440_direct_reference(direct_url: DirectUrl, name: str) -> str:
    """Convert a DirectUrl to a pip requirement string."""
    direct_url.validate()  # if invalid, this is a pip bug
    requirement = name + " @ "
    fragments = []
    if isinstance(direct_url.info, VcsInfo):
        requirement += "{}+{}@{}".format(
            direct_url.info.vcs, direct_url.url, direct_url.info.commit_id
        )
    elif isinstance(direct_url.info, ArchiveInfo):
        requirement += direct_url.url
        if direct_url.info.hash:
            fragments.append(direct_url.info.hash)
    else:
        assert isinstance(direct_url.info, DirInfo)
        requirement += direct_url.url
    if direct_url.subdirectory:
        fragments.append("subdirectory=" + direct_url.subdirectory)
    if fragments:
        requirement += "#" + "&".join(fragments)
    return requirement


def direct_url_for_editable(source_dir: str) -> DirectUrl:
    return DirectUrl(
        url=path_to_url(source_dir),
        info=DirInfo(editable=True),
    )


def direct_url_from_link(
    link: Link, source_dir: Optional[str] = None, link_is_in_wheel_cache: bool = False
) -> DirectUrl:
    if link.is_vcs:
        vcs_backend = vcs.get_backend_for_scheme(link.scheme)
        assert vcs_backend
        url, requested_revision, _ = vcs_backend.get_url_rev_and_auth(
            link.url_without_fragment
        )
        # For VCS links, we need to find out and add commit_id.
        if link_is_in_wheel_cache:
            # If the requested VCS link corresponds to a cached
            # wheel, it means the requested revision was an
            # immutable commit hash, otherwise it would not have
            # been cached. In that case we don't have a source_dir
            # with the VCS checkout.
            assert requested_revision
            commit_id = requested_revision
        else:
            # If the wheel was not in cache, it means we have
            # had to checkout from VCS to build and we have a source_dir
            # which we can inspect to find out the commit id.
            assert source_dir
            commit_id = vcs_backend.get_revision(source_dir)
        return DirectUrl(
            url=url,
            info=VcsInfo(
                vcs=vcs_backend.name,
                commit_id=commit_id,
                requested_revision=requested_revision,
            ),
            subdirectory=link.subdirectory_fragment,
        )
    elif link.is_existing_dir():
        return DirectUrl(
            url=link.url_without_fragment,
            info=DirInfo(),
            subdirectory=link.subdirectory_fragment,
        )
    else:
        hash = None
        hash_name = link.hash_name
        if hash_name:
            hash = f"{hash_name}={link.hash}"
        return DirectUrl(
            url=link.url_without_fragment,
            info=ArchiveInfo(hash=hash),
            subdirectory=link.subdirectory_fragment,
        )
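A sketch pairing the two directory-oriented helpers (the checkout path and project name are illustrative):

from pip._internal.utils.direct_url_helpers import (
    direct_url_as_pep440_direct_reference,
    direct_url_for_editable,
)

durl = direct_url_for_editable("/home/user/project")  # hypothetical checkout
req = direct_url_as_pep440_direct_reference(durl, name="project")
print(req)  # project @ file:///home/user/project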
72
venv/lib/python3.11/site-packages/pip/_internal/utils/egg_link.py
Normal file
@@ -0,0 +1,72 @@
import os
import re
import sys
from typing import List, Optional

from pip._internal.locations import site_packages, user_site
from pip._internal.utils.virtualenv import (
    running_under_virtualenv,
    virtualenv_no_global,
)

__all__ = [
    "egg_link_path_from_sys_path",
    "egg_link_path_from_location",
]


def _egg_link_name(raw_name: str) -> str:
    """
    Convert a Name metadata value to a .egg-link name, by applying
    the same substitution as pkg_resources's safe_name function.
    Note: we cannot use canonicalize_name because it has a different logic.
    """
    return re.sub("[^A-Za-z0-9.]+", "-", raw_name) + ".egg-link"


def egg_link_path_from_sys_path(raw_name: str) -> Optional[str]:
    """
    Look for a .egg-link file for project name, by walking sys.path.
    """
    egg_link_name = _egg_link_name(raw_name)
    for path_item in sys.path:
        egg_link = os.path.join(path_item, egg_link_name)
        if os.path.isfile(egg_link):
            return egg_link
    return None


def egg_link_path_from_location(raw_name: str) -> Optional[str]:
    """
    Return the path for the .egg-link file if it exists, otherwise, None.

    There are 3 scenarios:
    1) not in a virtualenv
       try to find in site.USER_SITE, then site_packages
    2) in a no-global virtualenv
       try to find in site_packages
    3) in a yes-global virtualenv
       try to find in site_packages, then site.USER_SITE
       (don't look in global location)

    For #1 and #3, there could be odd cases, where there's an egg-link in 2
    locations.

    This method will just return the first one found.
    """
    sites: List[str] = []
    if running_under_virtualenv():
        sites.append(site_packages)
        if not virtualenv_no_global() and user_site:
            sites.append(user_site)
    else:
        if user_site:
            sites.append(user_site)
        sites.append(site_packages)

    egg_link_name = _egg_link_name(raw_name)
    for site in sites:
        egglink = os.path.join(site, egg_link_name)
        if os.path.isfile(egglink):
            return egglink
    return None
36
venv/lib/python3.11/site-packages/pip/_internal/utils/encoding.py
Normal file
@@ -0,0 +1,36 @@
import codecs
import locale
import re
import sys
from typing import List, Tuple

BOMS: List[Tuple[bytes, str]] = [
    (codecs.BOM_UTF8, "utf-8"),
    (codecs.BOM_UTF16, "utf-16"),
    (codecs.BOM_UTF16_BE, "utf-16-be"),
    (codecs.BOM_UTF16_LE, "utf-16-le"),
    (codecs.BOM_UTF32, "utf-32"),
    (codecs.BOM_UTF32_BE, "utf-32-be"),
    (codecs.BOM_UTF32_LE, "utf-32-le"),
]

ENCODING_RE = re.compile(rb"coding[:=]\s*([-\w.]+)")


def auto_decode(data: bytes) -> str:
    """Check a bytes string for a BOM to correctly detect the encoding.

    Falls back to locale.getpreferredencoding(False), like open() on Python 3."""
    for bom, encoding in BOMS:
        if data.startswith(bom):
            return data[len(bom) :].decode(encoding)
    # Let's check the first two lines as in PEP 263.
    for line in data.split(b"\n")[:2]:
        if line[0:1] == b"#" and ENCODING_RE.search(line):
            result = ENCODING_RE.search(line)
            assert result is not None
            encoding = result.groups()[0].decode("ascii")
            return data.decode(encoding)
    return data.decode(
        locale.getpreferredencoding(False) or sys.getdefaultencoding(),
    )
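A quick illustration of the BOM branch versus the PEP 263 coding-declaration branch:

import codecs

from pip._internal.utils.encoding import auto_decode

# BOM branch: the UTF-8 BOM is stripped and the rest decoded as utf-8
print(auto_decode(codecs.BOM_UTF8 + "héllo".encode("utf-8")))  # 'héllo'

# PEP 263 branch: the declared codec in the first two lines wins
print(auto_decode(b"# -*- coding: latin-1 -*-\nname = '\xe9'\n"))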
84
venv/lib/python3.11/site-packages/pip/_internal/utils/entrypoints.py
Normal file
@@ -0,0 +1,84 @@
import itertools
import os
import shutil
import sys
from typing import List, Optional

from pip._internal.cli.main import main
from pip._internal.utils.compat import WINDOWS

_EXECUTABLE_NAMES = [
    "pip",
    f"pip{sys.version_info.major}",
    f"pip{sys.version_info.major}.{sys.version_info.minor}",
]
if WINDOWS:
    _allowed_extensions = {"", ".exe"}
    _EXECUTABLE_NAMES = [
        "".join(parts)
        for parts in itertools.product(_EXECUTABLE_NAMES, _allowed_extensions)
    ]


def _wrapper(args: Optional[List[str]] = None) -> int:
    """Central wrapper for all old entrypoints.

    Historically pip has had several entrypoints defined. Because of issues
    arising from PATH, sys.path, multiple Pythons, their interactions, and most
    of them having a pip installed, users suffer every time an entrypoint gets
    moved.

    To alleviate this pain, and provide a mechanism for warning users and
    directing them to an appropriate place for help, we now define all of
    our old entrypoints as wrappers for the current one.
    """
    sys.stderr.write(
        "WARNING: pip is being invoked by an old script wrapper. This will "
        "fail in a future version of pip.\n"
        "Please see https://github.com/pypa/pip/issues/5599 for advice on "
        "fixing the underlying issue.\n"
        "To avoid this problem you can invoke Python with '-m pip' instead of "
        "running pip directly.\n"
    )
    return main(args)


def get_best_invocation_for_this_pip() -> str:
    """Try to figure out the best way to invoke pip in the current environment."""
    binary_directory = "Scripts" if WINDOWS else "bin"
    binary_prefix = os.path.join(sys.prefix, binary_directory)

    # Try to use pip[X[.Y]] names, if those executables for this environment are
    # the first on PATH with that name.
    path_parts = os.path.normcase(os.environ.get("PATH", "")).split(os.pathsep)
    exe_are_in_PATH = os.path.normcase(binary_prefix) in path_parts
    if exe_are_in_PATH:
        for exe_name in _EXECUTABLE_NAMES:
            found_executable = shutil.which(exe_name)
            binary_executable = os.path.join(binary_prefix, exe_name)
            if (
                found_executable
                and os.path.exists(binary_executable)
                and os.path.samefile(
                    found_executable,
                    binary_executable,
                )
            ):
                return exe_name

    # Use the `-m` invocation, if there's no "nice" invocation.
    return f"{get_best_invocation_for_this_python()} -m pip"


def get_best_invocation_for_this_python() -> str:
    """Try to figure out the best way to invoke the current Python."""
    exe = sys.executable
    exe_name = os.path.basename(exe)

    # Try to use the basename, if it's the first executable.
    found_executable = shutil.which(exe_name)
    if found_executable and os.path.samefile(found_executable, exe):
        return exe_name

    # Use the full executable name, because we couldn't find something simpler.
    return exe
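These helpers feed pip's "you should upgrade with ..." hints; a sketch of their outputs (the printed values are examples, they depend on PATH and the active environment):

from pip._internal.utils.entrypoints import (
    get_best_invocation_for_this_pip,
    get_best_invocation_for_this_python,
)

# e.g. "pip" when <venv>/bin is first on PATH, else "/usr/bin/python3 -m pip"
print(get_best_invocation_for_this_pip())
print(get_best_invocation_for_this_python())  # e.g. "python3"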
153
venv/lib/python3.11/site-packages/pip/_internal/utils/filesystem.py
Normal file
@@ -0,0 +1,153 @@
import fnmatch
import os
import os.path
import random
import sys
from contextlib import contextmanager
from tempfile import NamedTemporaryFile
from typing import Any, BinaryIO, Generator, List, Union, cast

from pip._vendor.tenacity import retry, stop_after_delay, wait_fixed

from pip._internal.utils.compat import get_path_uid
from pip._internal.utils.misc import format_size


def check_path_owner(path: str) -> bool:
    # If we don't have a way to check the effective uid of this process, then
    # we'll just assume that we own the directory.
    if sys.platform == "win32" or not hasattr(os, "geteuid"):
        return True

    assert os.path.isabs(path)

    previous = None
    while path != previous:
        if os.path.lexists(path):
            # Check if path is writable by current user.
            if os.geteuid() == 0:
                # Special handling for root user in order to handle properly
                # cases where users use sudo without -H flag.
                try:
                    path_uid = get_path_uid(path)
                except OSError:
                    return False
                return path_uid == 0
            else:
                return os.access(path, os.W_OK)
        else:
            previous, path = path, os.path.dirname(path)
    return False  # assume we don't own the path


@contextmanager
def adjacent_tmp_file(path: str, **kwargs: Any) -> Generator[BinaryIO, None, None]:
    """Return a file-like object pointing to a tmp file next to path.

    The file is created securely and is ensured to be written to disk
    after the context reaches its end.

    kwargs will be passed to tempfile.NamedTemporaryFile to control
    the way the temporary file will be opened.
    """
    with NamedTemporaryFile(
        delete=False,
        dir=os.path.dirname(path),
        prefix=os.path.basename(path),
        suffix=".tmp",
        **kwargs,
    ) as f:
        result = cast(BinaryIO, f)
        try:
            yield result
        finally:
            result.flush()
            os.fsync(result.fileno())


# Tenacity raises RetryError by default, explicitly raise the original exception
_replace_retry = retry(reraise=True, stop=stop_after_delay(1), wait=wait_fixed(0.25))

replace = _replace_retry(os.replace)


# test_writable_dir and _test_writable_dir_win are copied from Flit,
# with the author's agreement to also place them under pip's license.
def test_writable_dir(path: str) -> bool:
    """Check if a directory is writable.

    Uses os.access() on POSIX, tries creating files on Windows.
    """
    # If the directory doesn't exist, find the closest parent that does.
    while not os.path.isdir(path):
        parent = os.path.dirname(path)
        if parent == path:
            break  # Should never get here, but infinite loops are bad
        path = parent

    if os.name == "posix":
        return os.access(path, os.W_OK)

    return _test_writable_dir_win(path)


def _test_writable_dir_win(path: str) -> bool:
    # os.access doesn't work on Windows: http://bugs.python.org/issue2528
    # and we can't use tempfile: http://bugs.python.org/issue22107
    basename = "accesstest_deleteme_fishfingers_custard_"
    alphabet = "abcdefghijklmnopqrstuvwxyz0123456789"
    for _ in range(10):
        name = basename + "".join(random.choice(alphabet) for _ in range(6))
        file = os.path.join(path, name)
        try:
            fd = os.open(file, os.O_RDWR | os.O_CREAT | os.O_EXCL)
        except FileExistsError:
            pass
        except PermissionError:
            # This could be because there's a directory with the same name.
            # But it's highly unlikely there's a directory called that,
            # so we'll assume it's because the parent dir is not writable.
            # This could as well be because the parent dir is not readable,
            # due to non-privileged user access.
            return False
        else:
            os.close(fd)
            os.unlink(file)
            return True

    # This should never be reached
    raise OSError("Unexpected condition testing for writable directory")


def find_files(path: str, pattern: str) -> List[str]:
    """Returns a list of absolute paths of files beneath path, recursively,
    with filenames which match the UNIX-style shell glob pattern."""
    result: List[str] = []
    for root, _, files in os.walk(path):
        matches = fnmatch.filter(files, pattern)
        result.extend(os.path.join(root, f) for f in matches)
    return result


def file_size(path: str) -> Union[int, float]:
    # If it's a symlink, return 0.
    if os.path.islink(path):
        return 0
    return os.path.getsize(path)


def format_file_size(path: str) -> str:
    return format_size(file_size(path))


def directory_size(path: str) -> Union[int, float]:
    size = 0.0
    for root, _dirs, files in os.walk(path):
        for filename in files:
            file_path = os.path.join(root, filename)
            size += file_size(file_path)
    return size


def format_directory_size(path: str) -> str:
    return format_size(directory_size(path))
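adjacent_tmp_file and the retrying replace are designed to be used together for crash-safe writes: write next to the destination, fsync, then atomically rename. A sketch (the target path is illustrative):

import os

from pip._internal.utils.filesystem import adjacent_tmp_file, replace

target = os.path.join("/tmp", "example.txt")  # hypothetical destination
with adjacent_tmp_file(target) as tmp:
    tmp.write(b"contents")    # flushed and fsync'd when the context exits
replace(tmp.name, target)     # atomic rename, retried briefly (helps on Windows)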
27
venv/lib/python3.11/site-packages/pip/_internal/utils/filetypes.py
Normal file
@@ -0,0 +1,27 @@
"""Filetype information.
"""

from typing import Tuple

from pip._internal.utils.misc import splitext

WHEEL_EXTENSION = ".whl"
BZ2_EXTENSIONS: Tuple[str, ...] = (".tar.bz2", ".tbz")
XZ_EXTENSIONS: Tuple[str, ...] = (
    ".tar.xz",
    ".txz",
    ".tlz",
    ".tar.lz",
    ".tar.lzma",
)
ZIP_EXTENSIONS: Tuple[str, ...] = (".zip", WHEEL_EXTENSION)
TAR_EXTENSIONS: Tuple[str, ...] = (".tar.gz", ".tgz", ".tar")
ARCHIVE_EXTENSIONS = ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS


def is_archive_file(name: str) -> bool:
    """Return True if `name` is considered an archive file."""
    ext = splitext(name)[1].lower()
    if ext in ARCHIVE_EXTENSIONS:
        return True
    return False
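A two-line illustration of the extension check (filenames are illustrative):

from pip._internal.utils.filetypes import is_archive_file

print(is_archive_file("pkg-1.0-py3-none-any.whl"))  # True: .whl counts as zip
print(is_archive_file("pkg-1.0.dist-info"))         # False: not an archive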
88
venv/lib/python3.11/site-packages/pip/_internal/utils/glibc.py
Normal file
@@ -0,0 +1,88 @@
import os
import sys
from typing import Optional, Tuple


def glibc_version_string() -> Optional[str]:
    "Returns glibc version string, or None if not using glibc."
    return glibc_version_string_confstr() or glibc_version_string_ctypes()


def glibc_version_string_confstr() -> Optional[str]:
    "Primary implementation of glibc_version_string using os.confstr."
    # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
    # to be broken or missing. This strategy is used in the standard library
    # platform module:
    # https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183
    if sys.platform == "win32":
        return None
    try:
        gnu_libc_version = os.confstr("CS_GNU_LIBC_VERSION")
        if gnu_libc_version is None:
            return None
        # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17":
        _, version = gnu_libc_version.split()
    except (AttributeError, OSError, ValueError):
        # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
        return None
    return version


def glibc_version_string_ctypes() -> Optional[str]:
    "Fallback implementation of glibc_version_string using ctypes."

    try:
        import ctypes
    except ImportError:
        return None

    # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
    # manpage says, "If filename is NULL, then the returned handle is for the
    # main program". This way we can let the linker do the work to figure out
    # which libc our process is actually using.
    process_namespace = ctypes.CDLL(None)
    try:
        gnu_get_libc_version = process_namespace.gnu_get_libc_version
    except AttributeError:
        # Symbol doesn't exist -> therefore, we are not linked to
        # glibc.
        return None

    # Call gnu_get_libc_version, which returns a string like "2.5"
    gnu_get_libc_version.restype = ctypes.c_char_p
    version_str = gnu_get_libc_version()
    # py2 / py3 compatibility:
    if not isinstance(version_str, str):
        version_str = version_str.decode("ascii")

    return version_str


# platform.libc_ver regularly returns completely nonsensical glibc
# versions. E.g. on my computer, platform says:
#
#   ~$ python2.7 -c 'import platform; print(platform.libc_ver())'
#   ('glibc', '2.7')
#   ~$ python3.5 -c 'import platform; print(platform.libc_ver())'
#   ('glibc', '2.9')
#
# But the truth is:
#
#   ~$ ldd --version
#   ldd (Debian GLIBC 2.22-11) 2.22
#
# This is unfortunate, because it means that the linehaul data on libc
# versions that was generated by pip 8.1.2 and earlier is useless and
# misleading. Solution: instead of using platform, use our code that actually
# works.
def libc_ver() -> Tuple[str, str]:
    """Try to determine the glibc version

    Returns a tuple of strings (lib, version) which default to empty strings
    in case the lookup fails.
    """
    glibc_version = glibc_version_string()
    if glibc_version is None:
        return ("", "")
    else:
        return ("glibc", glibc_version)
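A sketch of the lookup in practice (outputs are examples; both calls are safe on non-glibc platforms):

from pip._internal.utils.glibc import glibc_version_string, libc_ver

print(glibc_version_string())  # e.g. '2.31' on glibc Linux, None elsewhere
print(libc_ver())              # e.g. ('glibc', '2.31'), or ('', '') if unknown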
151
venv/lib/python3.11/site-packages/pip/_internal/utils/hashes.py
Normal file
@@ -0,0 +1,151 @@
import hashlib
from typing import TYPE_CHECKING, BinaryIO, Dict, Iterable, List, Optional

from pip._internal.exceptions import HashMismatch, HashMissing, InstallationError
from pip._internal.utils.misc import read_chunks

if TYPE_CHECKING:
    from hashlib import _Hash

    # NoReturn introduced in 3.6.2; imported only for type checking to maintain
    # pip compatibility with older patch versions of Python 3.6
    from typing import NoReturn


# The recommended hash algo of the moment. Change this whenever the state of
# the art changes; it won't hurt backward compatibility.
FAVORITE_HASH = "sha256"


# Names of hashlib algorithms allowed by the --hash option and ``pip hash``
# Currently, those are the ones at least as collision-resistant as sha256.
STRONG_HASHES = ["sha256", "sha384", "sha512"]


class Hashes:
    """A wrapper that builds multiple hashes at once and checks them against
    known-good values

    """

    def __init__(self, hashes: Optional[Dict[str, List[str]]] = None) -> None:
        """
        :param hashes: A dict of algorithm names pointing to lists of allowed
            hex digests
        """
        allowed = {}
        if hashes is not None:
            for alg, keys in hashes.items():
                # Make sure values are always sorted (to ease equality checks)
                allowed[alg] = sorted(keys)
        self._allowed = allowed

    def __and__(self, other: "Hashes") -> "Hashes":
        if not isinstance(other, Hashes):
            return NotImplemented

        # If either of the Hashes object is entirely empty (i.e. no hash
        # specified at all), all hashes from the other object are allowed.
        if not other:
            return self
        if not self:
            return other

        # Otherwise only hashes that are present in both objects are allowed.
        new = {}
        for alg, values in other._allowed.items():
            if alg not in self._allowed:
                continue
            new[alg] = [v for v in values if v in self._allowed[alg]]
        return Hashes(new)

    @property
    def digest_count(self) -> int:
        return sum(len(digests) for digests in self._allowed.values())

    def is_hash_allowed(self, hash_name: str, hex_digest: str) -> bool:
        """Return whether the given hex digest is allowed."""
        return hex_digest in self._allowed.get(hash_name, [])

    def check_against_chunks(self, chunks: Iterable[bytes]) -> None:
        """Check good hashes against ones built from iterable of chunks of
        data.

        Raise HashMismatch if none match.

        """
        gots = {}
        for hash_name in self._allowed.keys():
            try:
                gots[hash_name] = hashlib.new(hash_name)
            except (ValueError, TypeError):
                raise InstallationError(f"Unknown hash name: {hash_name}")

        for chunk in chunks:
            for hash in gots.values():
                hash.update(chunk)

        for hash_name, got in gots.items():
            if got.hexdigest() in self._allowed[hash_name]:
                return
        self._raise(gots)

    def _raise(self, gots: Dict[str, "_Hash"]) -> "NoReturn":
        raise HashMismatch(self._allowed, gots)

    def check_against_file(self, file: BinaryIO) -> None:
        """Check good hashes against a file-like object

        Raise HashMismatch if none match.

        """
        return self.check_against_chunks(read_chunks(file))

    def check_against_path(self, path: str) -> None:
        with open(path, "rb") as file:
            return self.check_against_file(file)

    def has_one_of(self, hashes: Dict[str, str]) -> bool:
        """Return whether any of the given hashes are allowed."""
        for hash_name, hex_digest in hashes.items():
            if self.is_hash_allowed(hash_name, hex_digest):
                return True
        return False

    def __bool__(self) -> bool:
        """Return whether I know any known-good hashes."""
        return bool(self._allowed)

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, Hashes):
            return NotImplemented
        return self._allowed == other._allowed

    def __hash__(self) -> int:
        return hash(
            ",".join(
                sorted(
                    ":".join((alg, digest))
                    for alg, digest_list in self._allowed.items()
                    for digest in digest_list
                )
            )
        )


class MissingHashes(Hashes):
    """A workalike for Hashes used when we're missing a hash for a requirement

    It computes the actual hash of the requirement and raises a HashMissing
    exception showing it to the user.

    """

    def __init__(self) -> None:
        """Don't offer the ``hashes`` kwarg."""
        # Pass our favorite hash in to generate a "gotten hash". With the
        # empty list, it will never match, so an error will always raise.
        super().__init__(hashes={FAVORITE_HASH: []})

    def _raise(self, gots: Dict[str, "_Hash"]) -> "NoReturn":
        raise HashMissing(gots[FAVORITE_HASH].hexdigest())
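A sketch of the Hashes workflow: allow one known-good digest, then verify data against it (the allowed digest is computed on the spot, so the example is self-checking):

import hashlib

from pip._internal.exceptions import HashMismatch
from pip._internal.utils.hashes import Hashes

good = hashlib.sha256(b"data").hexdigest()
hashes = Hashes({"sha256": [good]})
hashes.check_against_chunks([b"data"])  # passes silently
try:
    hashes.check_against_chunks([b"tampered"])
except HashMismatch:
    print("mismatch detected")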
|
@ -0,0 +1,35 @@
|
|||
"""A helper module that injects SecureTransport, on import.
|
||||
|
||||
The import should be done as early as possible, to ensure all requests and
|
||||
sessions (or whatever) are created after injecting SecureTransport.
|
||||
|
||||
Note that we only do the injection on macOS, when the linked OpenSSL is too
|
||||
old to handle TLSv1.2.
|
||||
"""
|
||||
|
||||
import sys
|
||||
|
||||
|
||||
def inject_securetransport() -> None:
|
||||
# Only relevant on macOS
|
||||
if sys.platform != "darwin":
|
||||
return
|
||||
|
||||
try:
|
||||
import ssl
|
||||
except ImportError:
|
||||
return
|
||||
|
||||
# Checks for OpenSSL 1.0.1
|
||||
if ssl.OPENSSL_VERSION_NUMBER >= 0x1000100F:
|
||||
return
|
||||
|
||||
try:
|
||||
from pip._vendor.urllib3.contrib import securetransport
|
||||
except (ImportError, OSError):
|
||||
return
|
||||
|
||||
securetransport.inject_into_urllib3()
|
||||
|
||||
|
||||
inject_securetransport()
|
348
venv/lib/python3.11/site-packages/pip/_internal/utils/logging.py
Normal file
@ -0,0 +1,348 @@
|
|||
import contextlib
|
||||
import errno
|
||||
import logging
|
||||
import logging.handlers
|
||||
import os
|
||||
import sys
|
||||
import threading
|
||||
from dataclasses import dataclass
|
||||
from io import TextIOWrapper
|
||||
from logging import Filter
|
||||
from typing import Any, ClassVar, Generator, List, Optional, TextIO, Type
|
||||
|
||||
from pip._vendor.rich.console import (
|
||||
Console,
|
||||
ConsoleOptions,
|
||||
ConsoleRenderable,
|
||||
RenderableType,
|
||||
RenderResult,
|
||||
RichCast,
|
||||
)
|
||||
from pip._vendor.rich.highlighter import NullHighlighter
|
||||
from pip._vendor.rich.logging import RichHandler
|
||||
from pip._vendor.rich.segment import Segment
|
||||
from pip._vendor.rich.style import Style
|
||||
|
||||
from pip._internal.utils._log import VERBOSE, getLogger
|
||||
from pip._internal.utils.compat import WINDOWS
|
||||
from pip._internal.utils.deprecation import DEPRECATION_MSG_PREFIX
|
||||
from pip._internal.utils.misc import ensure_dir
|
||||
|
||||
_log_state = threading.local()
|
||||
subprocess_logger = getLogger("pip.subprocessor")
|
||||
|
||||
|
||||
class BrokenStdoutLoggingError(Exception):
|
||||
"""
|
||||
Raised if BrokenPipeError occurs for the stdout stream while logging.
|
||||
"""
|
||||
|
||||
|
||||
def _is_broken_pipe_error(exc_class: Type[BaseException], exc: BaseException) -> bool:
|
||||
if exc_class is BrokenPipeError:
|
||||
return True
|
||||
|
||||
# On Windows, a broken pipe can show up as EINVAL rather than EPIPE:
|
||||
# https://bugs.python.org/issue19612
|
||||
# https://bugs.python.org/issue30418
|
||||
if not WINDOWS:
|
||||
return False
|
||||
|
||||
return isinstance(exc, OSError) and exc.errno in (errno.EINVAL, errno.EPIPE)
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def indent_log(num: int = 2) -> Generator[None, None, None]:
|
||||
"""
|
||||
A context manager which will cause the log output to be indented for any
|
||||
log messages emitted inside it.
|
||||
"""
|
||||
# For thread-safety
|
||||
_log_state.indentation = get_indentation()
|
||||
_log_state.indentation += num
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
_log_state.indentation -= num
|
||||
|
||||
|
||||
def get_indentation() -> int:
|
||||
return getattr(_log_state, "indentation", 0)
|
||||
|
||||
|
||||
class IndentingFormatter(logging.Formatter):
|
||||
default_time_format = "%Y-%m-%dT%H:%M:%S"
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
*args: Any,
|
||||
add_timestamp: bool = False,
|
||||
**kwargs: Any,
|
||||
) -> None:
|
||||
"""
|
||||
A logging.Formatter that obeys the indent_log() context manager.
|
||||
|
||||
:param add_timestamp: A bool indicating output lines should be prefixed
|
||||
with their record's timestamp.
|
||||
"""
|
||||
self.add_timestamp = add_timestamp
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
def get_message_start(self, formatted: str, levelno: int) -> str:
|
||||
"""
|
||||
Return the start of the formatted log message (not counting the
|
||||
prefix to add to each line).
|
||||
"""
|
||||
if levelno < logging.WARNING:
|
||||
return ""
|
||||
if formatted.startswith(DEPRECATION_MSG_PREFIX):
|
||||
# Then the message already has a prefix. We don't want it to
|
||||
# look like "WARNING: DEPRECATION: ...."
|
||||
return ""
|
||||
if levelno < logging.ERROR:
|
||||
return "WARNING: "
|
||||
|
||||
return "ERROR: "
|
||||
|
||||
def format(self, record: logging.LogRecord) -> str:
|
||||
"""
|
||||
Calls the standard formatter, but will indent all of the log message
|
||||
lines by our current indentation level.
|
||||
"""
|
||||
formatted = super().format(record)
|
||||
message_start = self.get_message_start(formatted, record.levelno)
|
||||
formatted = message_start + formatted
|
||||
|
||||
prefix = ""
|
||||
if self.add_timestamp:
|
||||
prefix = f"{self.formatTime(record)} "
|
||||
prefix += " " * get_indentation()
|
||||
formatted = "".join([prefix + line for line in formatted.splitlines(True)])
|
||||
return formatted
|
||||
|
||||
|
||||
@dataclass
|
||||
class IndentedRenderable:
|
||||
renderable: RenderableType
|
||||
indent: int
|
||||
|
||||
def __rich_console__(
|
||||
self, console: Console, options: ConsoleOptions
|
||||
) -> RenderResult:
|
||||
segments = console.render(self.renderable, options)
|
||||
lines = Segment.split_lines(segments)
|
||||
for line in lines:
|
||||
yield Segment(" " * self.indent)
|
||||
yield from line
|
||||
yield Segment("\n")
|
||||
|
||||
|
||||
class RichPipStreamHandler(RichHandler):
|
||||
KEYWORDS: ClassVar[Optional[List[str]]] = []
|
||||
|
||||
def __init__(self, stream: Optional[TextIO], no_color: bool) -> None:
|
||||
super().__init__(
|
||||
console=Console(file=stream, no_color=no_color, soft_wrap=True),
|
||||
show_time=False,
|
||||
show_level=False,
|
||||
show_path=False,
|
||||
highlighter=NullHighlighter(),
|
||||
)
|
||||
|
||||
# Our custom override on Rich's logger, to make things work as we need them to.
|
||||
def emit(self, record: logging.LogRecord) -> None:
|
||||
style: Optional[Style] = None
|
||||
|
||||
# If we are given a diagnostic error to present, present it with indentation.
|
||||
assert isinstance(record.args, tuple)
|
||||
if record.msg == "[present-rich] %s" and len(record.args) == 1:
|
||||
rich_renderable = record.args[0]
|
||||
assert isinstance(
|
||||
rich_renderable, (ConsoleRenderable, RichCast, str)
|
||||
), f"{rich_renderable} is not rich-console-renderable"
|
||||
|
||||
renderable: RenderableType = IndentedRenderable(
|
||||
rich_renderable, indent=get_indentation()
|
||||
)
|
||||
else:
|
||||
message = self.format(record)
|
||||
renderable = self.render_message(record, message)
|
||||
if record.levelno is not None:
|
||||
if record.levelno >= logging.ERROR:
|
||||
style = Style(color="red")
|
||||
elif record.levelno >= logging.WARNING:
|
||||
style = Style(color="yellow")
|
||||
|
||||
try:
|
||||
self.console.print(renderable, overflow="ignore", crop=False, style=style)
|
||||
except Exception:
|
||||
self.handleError(record)
|
||||
|
||||
def handleError(self, record: logging.LogRecord) -> None:
|
||||
"""Called when logging is unable to log some output."""
|
||||
|
||||
exc_class, exc = sys.exc_info()[:2]
|
||||
# If a broken pipe occurred while calling write() or flush() on the
|
||||
# stdout stream in logging's Handler.emit(), then raise our special
|
||||
# exception so we can handle it in main() instead of logging the
|
||||
# broken pipe error and continuing.
|
||||
if (
|
||||
exc_class
|
||||
and exc
|
||||
and self.console.file is sys.stdout
|
||||
and _is_broken_pipe_error(exc_class, exc)
|
||||
):
|
||||
raise BrokenStdoutLoggingError()
|
||||
|
||||
return super().handleError(record)
|
||||
|
||||
|
||||
class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler):
|
||||
def _open(self) -> TextIOWrapper:
|
||||
ensure_dir(os.path.dirname(self.baseFilename))
|
||||
return super()._open()
|
||||
|
||||
|
||||
class MaxLevelFilter(Filter):
|
||||
def __init__(self, level: int) -> None:
|
        self.level = level

    def filter(self, record: logging.LogRecord) -> bool:
        return record.levelno < self.level


class ExcludeLoggerFilter(Filter):

    """
    A logging Filter that excludes records from a logger (or its children).
    """

    def filter(self, record: logging.LogRecord) -> bool:
        # The base Filter class allows only records from a logger (or its
        # children).
        return not super().filter(record)


def setup_logging(verbosity: int, no_color: bool, user_log_file: Optional[str]) -> int:
    """Configures and sets up all of the logging

    Returns the requested logging level, as its integer value.
    """

    # Determine the level to be logging at.
    if verbosity >= 2:
        level_number = logging.DEBUG
    elif verbosity == 1:
        level_number = VERBOSE
    elif verbosity == -1:
        level_number = logging.WARNING
    elif verbosity == -2:
        level_number = logging.ERROR
    elif verbosity <= -3:
        level_number = logging.CRITICAL
    else:
        level_number = logging.INFO

    level = logging.getLevelName(level_number)

    # The "root" logger should match the "console" level *unless* we also need
    # to log to a user log file.
    include_user_log = user_log_file is not None
    if include_user_log:
        additional_log_file = user_log_file
        root_level = "DEBUG"
    else:
        additional_log_file = "/dev/null"
        root_level = level

    # Disable any logging besides WARNING unless we have DEBUG level logging
    # enabled for vendored libraries.
    vendored_log_level = "WARNING" if level in ["INFO", "ERROR"] else "DEBUG"

    # Shorthands for clarity
    log_streams = {
        "stdout": "ext://sys.stdout",
        "stderr": "ext://sys.stderr",
    }
    handler_classes = {
        "stream": "pip._internal.utils.logging.RichPipStreamHandler",
        "file": "pip._internal.utils.logging.BetterRotatingFileHandler",
    }
    handlers = ["console", "console_errors", "console_subprocess"] + (
        ["user_log"] if include_user_log else []
    )

    logging.config.dictConfig(
        {
            "version": 1,
            "disable_existing_loggers": False,
            "filters": {
                "exclude_warnings": {
                    "()": "pip._internal.utils.logging.MaxLevelFilter",
                    "level": logging.WARNING,
                },
                "restrict_to_subprocess": {
                    "()": "logging.Filter",
                    "name": subprocess_logger.name,
                },
                "exclude_subprocess": {
                    "()": "pip._internal.utils.logging.ExcludeLoggerFilter",
                    "name": subprocess_logger.name,
                },
            },
            "formatters": {
                "indent": {
                    "()": IndentingFormatter,
                    "format": "%(message)s",
                },
                "indent_with_timestamp": {
                    "()": IndentingFormatter,
                    "format": "%(message)s",
                    "add_timestamp": True,
                },
            },
            "handlers": {
                "console": {
                    "level": level,
                    "class": handler_classes["stream"],
                    "no_color": no_color,
                    "stream": log_streams["stdout"],
                    "filters": ["exclude_subprocess", "exclude_warnings"],
                    "formatter": "indent",
                },
                "console_errors": {
                    "level": "WARNING",
                    "class": handler_classes["stream"],
                    "no_color": no_color,
                    "stream": log_streams["stderr"],
                    "filters": ["exclude_subprocess"],
                    "formatter": "indent",
                },
                # A handler responsible for logging to the console messages
                # from the "subprocessor" logger.
                "console_subprocess": {
                    "level": level,
                    "class": handler_classes["stream"],
                    "stream": log_streams["stderr"],
                    "no_color": no_color,
                    "filters": ["restrict_to_subprocess"],
                    "formatter": "indent",
                },
                "user_log": {
                    "level": "DEBUG",
                    "class": handler_classes["file"],
                    "filename": additional_log_file,
                    "encoding": "utf-8",
                    "delay": True,
                    "formatter": "indent_with_timestamp",
                },
            },
            "root": {
                "level": root_level,
                "handlers": handlers,
            },
            "loggers": {"pip._vendor": {"level": vendored_log_level}},
        }
    )

    return level_number
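
# Editor's note: an illustrative sketch (not part of pip) of how the -v/-q
# counts passed as ``verbosity`` map to levels, assuming pip's VERBOSE (15)
# sits between logging.DEBUG (10) and logging.INFO (20):
#
#     >>> setup_logging(verbosity=2, no_color=True, user_log_file=None)
#     10    # logging.DEBUG
#     >>> setup_logging(verbosity=0, no_color=True, user_log_file=None)
#     20    # logging.INFO
#     >>> setup_logging(verbosity=-2, no_color=True, user_log_file=None)
#     40    # logging.ERROR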
735 venv/lib/python3.11/site-packages/pip/_internal/utils/misc.py Normal file
@ -0,0 +1,735 @@
import contextlib
import errno
import getpass
import hashlib
import io
import logging
import os
import posixpath
import shutil
import stat
import sys
import sysconfig
import urllib.parse
from io import StringIO
from itertools import filterfalse, tee, zip_longest
from types import TracebackType
from typing import (
    Any,
    BinaryIO,
    Callable,
    ContextManager,
    Dict,
    Generator,
    Iterable,
    Iterator,
    List,
    Optional,
    TextIO,
    Tuple,
    Type,
    TypeVar,
    Union,
    cast,
)

from pip._vendor.pyproject_hooks import BuildBackendHookCaller
from pip._vendor.tenacity import retry, stop_after_delay, wait_fixed

from pip import __version__
from pip._internal.exceptions import CommandError, ExternallyManagedEnvironment
from pip._internal.locations import get_major_minor_version
from pip._internal.utils.compat import WINDOWS
from pip._internal.utils.virtualenv import running_under_virtualenv

__all__ = [
    "rmtree",
    "display_path",
    "backup_dir",
    "ask",
    "splitext",
    "format_size",
    "is_installable_dir",
    "normalize_path",
    "renames",
    "get_prog",
    "captured_stdout",
    "ensure_dir",
    "remove_auth_from_url",
    "check_externally_managed",
    "ConfiguredBuildBackendHookCaller",
]

logger = logging.getLogger(__name__)

T = TypeVar("T")
ExcInfo = Tuple[Type[BaseException], BaseException, TracebackType]
VersionInfo = Tuple[int, int, int]
NetlocTuple = Tuple[str, Tuple[Optional[str], Optional[str]]]


def get_pip_version() -> str:
    pip_pkg_dir = os.path.join(os.path.dirname(__file__), "..", "..")
    pip_pkg_dir = os.path.abspath(pip_pkg_dir)

    return "pip {} from {} (python {})".format(
        __version__,
        pip_pkg_dir,
        get_major_minor_version(),
    )


def normalize_version_info(py_version_info: Tuple[int, ...]) -> Tuple[int, int, int]:
    """
    Convert a tuple of ints representing a Python version to one of length
    three.

    :param py_version_info: a tuple of ints representing a Python version,
        or None to specify no version. The tuple can have any length.

    :return: a tuple of length three if `py_version_info` is non-None.
        Otherwise, return `py_version_info` unchanged (i.e. None).
    """
    if len(py_version_info) < 3:
        py_version_info += (3 - len(py_version_info)) * (0,)
    elif len(py_version_info) > 3:
        py_version_info = py_version_info[:3]

    return cast("VersionInfo", py_version_info)


def ensure_dir(path: str) -> None:
    """os.path.makedirs without EEXIST."""
    try:
        os.makedirs(path)
    except OSError as e:
        # Windows can raise spurious ENOTEMPTY errors. See #6426.
        if e.errno != errno.EEXIST and e.errno != errno.ENOTEMPTY:
            raise


def get_prog() -> str:
    try:
        prog = os.path.basename(sys.argv[0])
        if prog in ("__main__.py", "-c"):
            return f"{sys.executable} -m pip"
        else:
            return prog
    except (AttributeError, TypeError, IndexError):
        pass
    return "pip"


# Retry every half second for up to 3 seconds
# Tenacity raises RetryError by default, explicitly raise the original exception
@retry(reraise=True, stop=stop_after_delay(3), wait=wait_fixed(0.5))
def rmtree(dir: str, ignore_errors: bool = False) -> None:
    if sys.version_info >= (3, 12):
        shutil.rmtree(dir, ignore_errors=ignore_errors, onexc=rmtree_errorhandler)
    else:
        shutil.rmtree(dir, ignore_errors=ignore_errors, onerror=rmtree_errorhandler)


def rmtree_errorhandler(
    func: Callable[..., Any], path: str, exc_info: Union[ExcInfo, BaseException]
) -> None:
    """On Windows, the files in .svn are read-only, so when rmtree() tries to
    remove them, an exception is thrown. We catch that here, remove the
    read-only attribute, and hopefully continue without problems."""
    try:
        has_attr_readonly = not (os.stat(path).st_mode & stat.S_IWRITE)
    except OSError:
        # it's equivalent to os.path.exists
        return

    if has_attr_readonly:
        # convert to read/write
        os.chmod(path, stat.S_IWRITE)
        # use the original function to repeat the operation
        func(path)
        return
    else:
        raise


def display_path(path: str) -> str:
    """Gives the display value for a given path, making it relative to cwd
    if possible."""
    path = os.path.normcase(os.path.abspath(path))
    if path.startswith(os.getcwd() + os.path.sep):
        path = "." + path[len(os.getcwd()) :]
    return path


def backup_dir(dir: str, ext: str = ".bak") -> str:
    """Figure out the name of a directory to back up the given dir to
    (adding .bak, .bak2, etc)"""
    n = 1
    extension = ext
    while os.path.exists(dir + extension):
        n += 1
        extension = ext + str(n)
    return dir + extension


def ask_path_exists(message: str, options: Iterable[str]) -> str:
    for action in os.environ.get("PIP_EXISTS_ACTION", "").split():
        if action in options:
            return action
    return ask(message, options)


def _check_no_input(message: str) -> None:
    """Raise an error if no input is allowed."""
    if os.environ.get("PIP_NO_INPUT"):
        raise Exception(
            f"No input was expected ($PIP_NO_INPUT set); question: {message}"
        )


def ask(message: str, options: Iterable[str]) -> str:
    """Ask the message interactively, with the given possible responses"""
    while 1:
        _check_no_input(message)
        response = input(message)
        response = response.strip().lower()
        if response not in options:
            print(
                "Your response ({!r}) was not one of the expected responses: "
                "{}".format(response, ", ".join(options))
            )
        else:
            return response


def ask_input(message: str) -> str:
    """Ask for input interactively."""
    _check_no_input(message)
    return input(message)


def ask_password(message: str) -> str:
    """Ask for a password interactively."""
    _check_no_input(message)
    return getpass.getpass(message)


def strtobool(val: str) -> int:
    """Convert a string representation of truth to true (1) or false (0).

    True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values
    are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if
    'val' is anything else.
    """
    val = val.lower()
    if val in ("y", "yes", "t", "true", "on", "1"):
        return 1
    elif val in ("n", "no", "f", "false", "off", "0"):
        return 0
    else:
        raise ValueError(f"invalid truth value {val!r}")


def format_size(bytes: float) -> str:
    if bytes > 1000 * 1000:
        return "{:.1f} MB".format(bytes / 1000.0 / 1000)
    elif bytes > 10 * 1000:
        return "{} kB".format(int(bytes / 1000))
    elif bytes > 1000:
        return "{:.1f} kB".format(bytes / 1000.0)
    else:
        return "{} bytes".format(int(bytes))
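
# Editor's illustration (not part of pip): expected format_size() output,
# doctest-style, using the decimal (SI) units the code above works in:
#
#     >>> format_size(999)
#     '999 bytes'
#     >>> format_size(1234)
#     '1.2 kB'
#     >>> format_size(123456)
#     '123 kB'
#     >>> format_size(1234567)
#     '1.2 MB'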

def tabulate(rows: Iterable[Iterable[Any]]) -> Tuple[List[str], List[int]]:
    """Return a list of formatted rows and a list of column sizes.

    For example::

    >>> tabulate([['foobar', 2000], [0xdeadbeef]])
    (['foobar 2000', '3735928559'], [10, 4])
    """
    rows = [tuple(map(str, row)) for row in rows]
    sizes = [max(map(len, col)) for col in zip_longest(*rows, fillvalue="")]
    table = [" ".join(map(str.ljust, row, sizes)).rstrip() for row in rows]
    return table, sizes


def is_installable_dir(path: str) -> bool:
    """Is path a directory containing pyproject.toml or setup.py?

    If pyproject.toml exists, this is a PEP 517 project. Otherwise we look for
    a legacy setuptools layout by identifying setup.py. We don't check for
    setup.cfg because using it without setup.py is only available for PEP 517
    projects, which are already covered by the pyproject.toml check.
    """
    if not os.path.isdir(path):
        return False
    if os.path.isfile(os.path.join(path, "pyproject.toml")):
        return True
    if os.path.isfile(os.path.join(path, "setup.py")):
        return True
    return False


def read_chunks(
    file: BinaryIO, size: int = io.DEFAULT_BUFFER_SIZE
) -> Generator[bytes, None, None]:
    """Yield pieces of data from a file-like object until EOF."""
    while True:
        chunk = file.read(size)
        if not chunk:
            break
        yield chunk


def normalize_path(path: str, resolve_symlinks: bool = True) -> str:
    """
    Convert a path to its canonical, case-normalized, absolute version.
    """
    path = os.path.expanduser(path)
    if resolve_symlinks:
        path = os.path.realpath(path)
    else:
        path = os.path.abspath(path)
    return os.path.normcase(path)


def splitext(path: str) -> Tuple[str, str]:
    """Like os.path.splitext, but take off .tar too"""
    base, ext = posixpath.splitext(path)
    if base.lower().endswith(".tar"):
        ext = base[-4:] + ext
        base = base[:-4]
    return base, ext


def renames(old: str, new: str) -> None:
    """Like os.renames(), but handles renaming across devices."""
    # Implementation borrowed from os.renames().
    head, tail = os.path.split(new)
    if head and tail and not os.path.exists(head):
        os.makedirs(head)

    shutil.move(old, new)

    head, tail = os.path.split(old)
    if head and tail:
        try:
            os.removedirs(head)
        except OSError:
            pass


def is_local(path: str) -> bool:
    """
    Return True if path is within sys.prefix, if we're running in a virtualenv.

    If we're not in a virtualenv, all paths are considered "local."

    Caution: this function assumes the head of path has been normalized
    with normalize_path.
    """
    if not running_under_virtualenv():
        return True
    return path.startswith(normalize_path(sys.prefix))


def write_output(msg: Any, *args: Any) -> None:
    logger.info(msg, *args)


class StreamWrapper(StringIO):
    orig_stream: TextIO

    @classmethod
    def from_stream(cls, orig_stream: TextIO) -> "StreamWrapper":
        ret = cls()
        ret.orig_stream = orig_stream
        return ret

    # compileall.compile_dir() needs stdout.encoding to print to stdout
    # type ignore is because TextIOBase.encoding is writeable
    @property
    def encoding(self) -> str:  # type: ignore
        return self.orig_stream.encoding


@contextlib.contextmanager
def captured_output(stream_name: str) -> Generator[StreamWrapper, None, None]:
    """Return a context manager used by captured_stdout/stdin/stderr
    that temporarily replaces the sys stream *stream_name* with a StringIO.

    Taken from Lib/support/__init__.py in the CPython repo.
    """
    orig_stdout = getattr(sys, stream_name)
    setattr(sys, stream_name, StreamWrapper.from_stream(orig_stdout))
    try:
        yield getattr(sys, stream_name)
    finally:
        setattr(sys, stream_name, orig_stdout)


def captured_stdout() -> ContextManager[StreamWrapper]:
    """Capture the output of sys.stdout:

       with captured_stdout() as stdout:
           print('hello')
       self.assertEqual(stdout.getvalue(), 'hello\n')

    Taken from Lib/support/__init__.py in the CPython repo.
    """
    return captured_output("stdout")


def captured_stderr() -> ContextManager[StreamWrapper]:
    """
    See captured_stdout().
    """
    return captured_output("stderr")


# Simulates an enum
def enum(*sequential: Any, **named: Any) -> Type[Any]:
    enums = dict(zip(sequential, range(len(sequential))), **named)
    reverse = {value: key for key, value in enums.items()}
    enums["reverse_mapping"] = reverse
    return type("Enum", (), enums)


def build_netloc(host: str, port: Optional[int]) -> str:
    """
    Build a netloc from a host-port pair
    """
    if port is None:
        return host
    if ":" in host:
        # Only wrap host with square brackets when it is IPv6
        host = f"[{host}]"
    return f"{host}:{port}"


def build_url_from_netloc(netloc: str, scheme: str = "https") -> str:
    """
    Build a full URL from a netloc.
    """
    if netloc.count(":") >= 2 and "@" not in netloc and "[" not in netloc:
        # It must be a bare IPv6 address, so wrap it with brackets.
        netloc = f"[{netloc}]"
    return f"{scheme}://{netloc}"


def parse_netloc(netloc: str) -> Tuple[Optional[str], Optional[int]]:
    """
    Return the host-port pair from a netloc.
    """
    url = build_url_from_netloc(netloc)
    parsed = urllib.parse.urlparse(url)
    return parsed.hostname, parsed.port


def split_auth_from_netloc(netloc: str) -> NetlocTuple:
    """
    Parse out and remove the auth information from a netloc.

    Returns: (netloc, (username, password)).
    """
    if "@" not in netloc:
        return netloc, (None, None)

    # Split from the right because that's how urllib.parse.urlsplit()
    # behaves if more than one @ is present (which can be checked using
    # the password attribute of urlsplit()'s return value).
    auth, netloc = netloc.rsplit("@", 1)
    pw: Optional[str] = None
    if ":" in auth:
        # Split from the left because that's how urllib.parse.urlsplit()
        # behaves if more than one : is present (which again can be checked
        # using the password attribute of the return value)
        user, pw = auth.split(":", 1)
    else:
        user, pw = auth, None

    user = urllib.parse.unquote(user)
    if pw is not None:
        pw = urllib.parse.unquote(pw)

    return netloc, (user, pw)


def redact_netloc(netloc: str) -> str:
    """
    Replace the sensitive data in a netloc with "****", if it exists.

    For example:
        - "user:pass@example.com" returns "user:****@example.com"
        - "accesstoken@example.com" returns "****@example.com"
    """
    netloc, (user, password) = split_auth_from_netloc(netloc)
    if user is None:
        return netloc
    if password is None:
        user = "****"
        password = ""
    else:
        user = urllib.parse.quote(user)
        password = ":****"
    return "{user}{password}@{netloc}".format(
        user=user, password=password, netloc=netloc
    )
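
# Editor's illustration (not part of pip): how the two helpers above treat
# credentials embedded in a netloc, doctest-style:
#
#     >>> split_auth_from_netloc("user:pass@example.com")
#     ('example.com', ('user', 'pass'))
#     >>> redact_netloc("user:pass@example.com")
#     'user:****@example.com'
#     >>> redact_netloc("accesstoken@example.com")
#     '****@example.com'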

def _transform_url(
    url: str, transform_netloc: Callable[[str], Tuple[Any, ...]]
) -> Tuple[str, NetlocTuple]:
    """Transform and replace netloc in a url.

    transform_netloc is a function taking the netloc and returning a
    tuple. The first element of this tuple is the new netloc. The
    entire tuple is returned.

    Returns a tuple containing the transformed url as item 0 and the
    original tuple returned by transform_netloc as item 1.
    """
    purl = urllib.parse.urlsplit(url)
    netloc_tuple = transform_netloc(purl.netloc)
    # stripped url
    url_pieces = (purl.scheme, netloc_tuple[0], purl.path, purl.query, purl.fragment)
    surl = urllib.parse.urlunsplit(url_pieces)
    return surl, cast("NetlocTuple", netloc_tuple)


def _get_netloc(netloc: str) -> NetlocTuple:
    return split_auth_from_netloc(netloc)


def _redact_netloc(netloc: str) -> Tuple[str]:
    return (redact_netloc(netloc),)


def split_auth_netloc_from_url(
    url: str,
) -> Tuple[str, str, Tuple[Optional[str], Optional[str]]]:
    """
    Parse a url into separate netloc, auth, and url with no auth.

    Returns: (url_without_auth, netloc, (username, password))
    """
    url_without_auth, (netloc, auth) = _transform_url(url, _get_netloc)
    return url_without_auth, netloc, auth


def remove_auth_from_url(url: str) -> str:
    """Return a copy of url with 'username:password@' removed."""
    # username/pass params are passed to subversion through flags
    # and are not recognized in the url.
    return _transform_url(url, _get_netloc)[0]


def redact_auth_from_url(url: str) -> str:
    """Replace the password in a given url with ****."""
    return _transform_url(url, _redact_netloc)[0]


class HiddenText:
    def __init__(self, secret: str, redacted: str) -> None:
        self.secret = secret
        self.redacted = redacted

    def __repr__(self) -> str:
        return "<HiddenText {!r}>".format(str(self))

    def __str__(self) -> str:
        return self.redacted

    # This is useful for testing.
    def __eq__(self, other: Any) -> bool:
        if type(self) != type(other):
            return False

        # The string being used for redaction doesn't also have to match,
        # just the raw, original string.
        return self.secret == other.secret


def hide_value(value: str) -> HiddenText:
    return HiddenText(value, redacted="****")


def hide_url(url: str) -> HiddenText:
    redacted = redact_auth_from_url(url)
    return HiddenText(url, redacted=redacted)


def protect_pip_from_modification_on_windows(modifying_pip: bool) -> None:
    """Protection of pip.exe from modification on Windows

    On Windows, any operation modifying pip should be run as:
        python -m pip ...
    """
    pip_names = [
        "pip",
        f"pip{sys.version_info.major}",
        f"pip{sys.version_info.major}.{sys.version_info.minor}",
    ]

    # See https://github.com/pypa/pip/issues/1299 for more discussion
    should_show_use_python_msg = (
        modifying_pip and WINDOWS and os.path.basename(sys.argv[0]) in pip_names
    )

    if should_show_use_python_msg:
        new_command = [sys.executable, "-m", "pip"] + sys.argv[1:]
        raise CommandError(
            "To modify pip, please run the following command:\n{}".format(
                " ".join(new_command)
            )
        )


def check_externally_managed() -> None:
    """Check whether the current environment is externally managed.

    If the ``EXTERNALLY-MANAGED`` config file is found, the current environment
    is considered externally managed, and an ExternallyManagedEnvironment is
    raised.
    """
    if running_under_virtualenv():
        return
    marker = os.path.join(sysconfig.get_path("stdlib"), "EXTERNALLY-MANAGED")
    if not os.path.isfile(marker):
        return
    raise ExternallyManagedEnvironment.from_config(marker)


def is_console_interactive() -> bool:
    """Is this console interactive?"""
    return sys.stdin is not None and sys.stdin.isatty()


def hash_file(path: str, blocksize: int = 1 << 20) -> Tuple[Any, int]:
    """Return (hash, length) for path using hashlib.sha256()"""

    h = hashlib.sha256()
    length = 0
    with open(path, "rb") as f:
        for block in read_chunks(f, size=blocksize):
            length += len(block)
            h.update(block)
    return h, length


def pairwise(iterable: Iterable[Any]) -> Iterator[Tuple[Any, Any]]:
    """
    Return paired elements.

    For example:
        s -> (s0, s1), (s2, s3), (s4, s5), ...
    """
    iterable = iter(iterable)
    return zip_longest(iterable, iterable)


def partition(
    pred: Callable[[T], bool],
    iterable: Iterable[T],
) -> Tuple[Iterable[T], Iterable[T]]:
    """
    Use a predicate to partition entries into false entries and true entries,
    like

        partition(is_odd, range(10)) --> 0 2 4 6 8 and 1 3 5 7 9
    """
    t1, t2 = tee(iterable)
    return filterfalse(pred, t1), filter(pred, t2)
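
# Editor's illustration (not part of pip): doctest-style behavior of the two
# itertools helpers above (note pairwise pads an odd-length input with None,
# via zip_longest):
#
#     >>> list(pairwise([1, 2, 3, 4]))
#     [(1, 2), (3, 4)]
#     >>> list(pairwise([1, 2, 3]))
#     [(1, 2), (3, None)]
#     >>> evens, odds = partition(lambda n: n % 2 == 1, range(10))
#     >>> (list(evens), list(odds))
#     ([0, 2, 4, 6, 8], [1, 3, 5, 7, 9])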

class ConfiguredBuildBackendHookCaller(BuildBackendHookCaller):
    def __init__(
        self,
        config_holder: Any,
        source_dir: str,
        build_backend: str,
        backend_path: Optional[str] = None,
        runner: Optional[Callable[..., None]] = None,
        python_executable: Optional[str] = None,
    ):
        super().__init__(
            source_dir, build_backend, backend_path, runner, python_executable
        )
        self.config_holder = config_holder

    def build_wheel(
        self,
        wheel_directory: str,
        config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
        metadata_directory: Optional[str] = None,
    ) -> str:
        cs = self.config_holder.config_settings
        return super().build_wheel(
            wheel_directory, config_settings=cs, metadata_directory=metadata_directory
        )

    def build_sdist(
        self,
        sdist_directory: str,
        config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
    ) -> str:
        cs = self.config_holder.config_settings
        return super().build_sdist(sdist_directory, config_settings=cs)

    def build_editable(
        self,
        wheel_directory: str,
        config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
        metadata_directory: Optional[str] = None,
    ) -> str:
        cs = self.config_holder.config_settings
        return super().build_editable(
            wheel_directory, config_settings=cs, metadata_directory=metadata_directory
        )

    def get_requires_for_build_wheel(
        self, config_settings: Optional[Dict[str, Union[str, List[str]]]] = None
    ) -> List[str]:
        cs = self.config_holder.config_settings
        return super().get_requires_for_build_wheel(config_settings=cs)

    def get_requires_for_build_sdist(
        self, config_settings: Optional[Dict[str, Union[str, List[str]]]] = None
    ) -> List[str]:
        cs = self.config_holder.config_settings
        return super().get_requires_for_build_sdist(config_settings=cs)

    def get_requires_for_build_editable(
        self, config_settings: Optional[Dict[str, Union[str, List[str]]]] = None
    ) -> List[str]:
        cs = self.config_holder.config_settings
        return super().get_requires_for_build_editable(config_settings=cs)

    def prepare_metadata_for_build_wheel(
        self,
        metadata_directory: str,
        config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
        _allow_fallback: bool = True,
    ) -> str:
        cs = self.config_holder.config_settings
        return super().prepare_metadata_for_build_wheel(
            metadata_directory=metadata_directory,
            config_settings=cs,
            _allow_fallback=_allow_fallback,
        )

    def prepare_metadata_for_build_editable(
        self,
        metadata_directory: str,
        config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
        _allow_fallback: bool = True,
    ) -> str:
        cs = self.config_holder.config_settings
        return super().prepare_metadata_for_build_editable(
            metadata_directory=metadata_directory,
            config_settings=cs,
            _allow_fallback=_allow_fallback,
        )
39 venv/lib/python3.11/site-packages/pip/_internal/utils/models.py Normal file
@ -0,0 +1,39 @@
"""Utilities for defining models
"""

import operator
from typing import Any, Callable, Type


class KeyBasedCompareMixin:
    """Provides comparison capabilities that are based on a key"""

    __slots__ = ["_compare_key", "_defining_class"]

    def __init__(self, key: Any, defining_class: Type["KeyBasedCompareMixin"]) -> None:
        self._compare_key = key
        self._defining_class = defining_class

    def __hash__(self) -> int:
        return hash(self._compare_key)

    def __lt__(self, other: Any) -> bool:
        return self._compare(other, operator.__lt__)

    def __le__(self, other: Any) -> bool:
        return self._compare(other, operator.__le__)

    def __gt__(self, other: Any) -> bool:
        return self._compare(other, operator.__gt__)

    def __ge__(self, other: Any) -> bool:
        return self._compare(other, operator.__ge__)

    def __eq__(self, other: Any) -> bool:
        return self._compare(other, operator.__eq__)

    def _compare(self, other: Any, method: Callable[[Any, Any], bool]) -> bool:
        if not isinstance(other, self._defining_class):
            return NotImplemented

        return method(self._compare_key, other._compare_key)
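
# Editor's sketch (not part of pip): a minimal, hypothetical subclass showing
# how the mixin delegates all rich comparisons to the stored key:
#
#     class Candidate(KeyBasedCompareMixin):
#         def __init__(self, version: int) -> None:
#             super().__init__(key=version, defining_class=Candidate)
#
#     >>> Candidate(1) < Candidate(2)
#     True
#     >>> Candidate(2) == Candidate(2)
#     True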
57 venv/lib/python3.11/site-packages/pip/_internal/utils/packaging.py Normal file
@ -0,0 +1,57 @@
import functools
import logging
import re
from typing import NewType, Optional, Tuple, cast

from pip._vendor.packaging import specifiers, version
from pip._vendor.packaging.requirements import Requirement

NormalizedExtra = NewType("NormalizedExtra", str)

logger = logging.getLogger(__name__)


def check_requires_python(
    requires_python: Optional[str], version_info: Tuple[int, ...]
) -> bool:
    """
    Check if the given Python version matches a "Requires-Python" specifier.

    :param version_info: A 3-tuple of ints representing a Python
        major-minor-micro version to check (e.g. `sys.version_info[:3]`).

    :return: `True` if the given Python version satisfies the requirement.
        Otherwise, return `False`.

    :raises InvalidSpecifier: If `requires_python` has an invalid format.
    """
    if requires_python is None:
        # The package provides no information
        return True
    requires_python_specifier = specifiers.SpecifierSet(requires_python)

    python_version = version.parse(".".join(map(str, version_info)))
    return python_version in requires_python_specifier
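
# Editor's illustration (not part of pip): doctest-style behavior of
# check_requires_python() for a few representative inputs:
#
#     >>> check_requires_python(">=3.8", (3, 11, 4))
#     True
#     >>> check_requires_python(">=3.8", (3, 7, 0))
#     False
#     >>> check_requires_python(None, (3, 7, 0))  # no metadata means "anything goes"
#     True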

@functools.lru_cache(maxsize=512)
def get_requirement(req_string: str) -> Requirement:
    """Construct a packaging.Requirement object with caching"""
    # Parsing requirement strings is expensive, and is also expected to happen
    # with a low diversity of different arguments (at least relative to the
    # number constructed). This method adds a cache to requirement object
    # creation to minimize repeated parsing of the same string to construct
    # equivalent Requirement objects.
    return Requirement(req_string)


def safe_extra(extra: str) -> NormalizedExtra:
    """Convert an arbitrary string to a standard 'extra' name

    Any runs of non-alphanumeric characters are replaced with a single '_',
    and the result is always lowercased.

    This function is duplicated from ``pkg_resources``. Note that this is not
    the same as either ``canonicalize_name`` or ``_egg_link_name``.
    """
    return cast(NormalizedExtra, re.sub("[^A-Za-z0-9.-]+", "_", extra).lower())
146 venv/lib/python3.11/site-packages/pip/_internal/utils/setuptools_build.py Normal file
@ -0,0 +1,146 @@
import sys
import textwrap
from typing import List, Optional, Sequence

# Shim to wrap setup.py invocation with setuptools
# Note that __file__ is handled via two {!r} *and* %r, to ensure that paths on
# Windows are correctly handled (it should be "C:\\Users" not "C:\Users").
_SETUPTOOLS_SHIM = textwrap.dedent(
    """
    exec(compile('''
    # This is <pip-setuptools-caller> -- a caller that pip uses to run setup.py
    #
    # - It imports setuptools before invoking setup.py, to enable projects that directly
    #   import from `distutils.core` to work with newer packaging standards.
    # - It provides a clear error message when setuptools is not installed.
    # - It sets `sys.argv[0]` to the underlying `setup.py`, when invoking `setup.py` so
    #   setuptools doesn't think the script is `-c`. This avoids the following warning:
    #     manifest_maker: standard file '-c' not found".
    # - It generates a shim setup.py, for handling setup.cfg-only projects.
    import os, sys, tokenize

    try:
        import setuptools
    except ImportError as error:
        print(
            "ERROR: Can not execute `setup.py` since setuptools is not available in "
            "the build environment.",
            file=sys.stderr,
        )
        sys.exit(1)

    __file__ = %r
    sys.argv[0] = __file__

    if os.path.exists(__file__):
        filename = __file__
        with tokenize.open(__file__) as f:
            setup_py_code = f.read()
    else:
        filename = "<auto-generated setuptools caller>"
        setup_py_code = "from setuptools import setup; setup()"

    exec(compile(setup_py_code, filename, "exec"))
    ''' % ({!r},), "<pip-setuptools-caller>", "exec"))
    """
).rstrip()


def make_setuptools_shim_args(
    setup_py_path: str,
    global_options: Optional[Sequence[str]] = None,
    no_user_config: bool = False,
    unbuffered_output: bool = False,
) -> List[str]:
    """
    Get setuptools command arguments with shim wrapped setup file invocation.

    :param setup_py_path: The path to setup.py to be wrapped.
    :param global_options: Additional global options.
    :param no_user_config: If True, disables personal user configuration.
    :param unbuffered_output: If True, adds the unbuffered switch to the
      argument list.
    """
    args = [sys.executable]
    if unbuffered_output:
        args += ["-u"]
    args += ["-c", _SETUPTOOLS_SHIM.format(setup_py_path)]
    if global_options:
        args += global_options
    if no_user_config:
        args += ["--no-user-cfg"]
    return args
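
# Editor's illustration (not part of pip): the rough shape of the argument
# list produced above, assuming a hypothetical path /tmp/pkg/setup.py:
#
#     >>> args = make_setuptools_shim_args("/tmp/pkg/setup.py", unbuffered_output=True)
#     >>> args[1:3] == ["-u", "-c"] and "pip-setuptools-caller" in args[3]
#     True
#
# i.e. [sys.executable, "-u", "-c", "<shim source with the setup.py path baked in>"].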

def make_setuptools_bdist_wheel_args(
    setup_py_path: str,
    global_options: Sequence[str],
    build_options: Sequence[str],
    destination_dir: str,
) -> List[str]:
    # NOTE: Eventually, we'd want to also add -S to the flags here, when we're
    # isolating. Currently, it breaks Python in virtualenvs, because it
    # relies on site.py to find parts of the standard library outside the
    # virtualenv.
    args = make_setuptools_shim_args(
        setup_py_path, global_options=global_options, unbuffered_output=True
    )
    args += ["bdist_wheel", "-d", destination_dir]
    args += build_options
    return args


def make_setuptools_clean_args(
    setup_py_path: str,
    global_options: Sequence[str],
) -> List[str]:
    args = make_setuptools_shim_args(
        setup_py_path, global_options=global_options, unbuffered_output=True
    )
    args += ["clean", "--all"]
    return args


def make_setuptools_develop_args(
    setup_py_path: str,
    *,
    global_options: Sequence[str],
    no_user_config: bool,
    prefix: Optional[str],
    home: Optional[str],
    use_user_site: bool,
) -> List[str]:
    assert not (use_user_site and prefix)

    args = make_setuptools_shim_args(
        setup_py_path,
        global_options=global_options,
        no_user_config=no_user_config,
    )

    args += ["develop", "--no-deps"]

    if prefix:
        args += ["--prefix", prefix]
    if home is not None:
        args += ["--install-dir", home]

    if use_user_site:
        args += ["--user", "--prefix="]

    return args


def make_setuptools_egg_info_args(
    setup_py_path: str,
    egg_info_dir: Optional[str],
    no_user_config: bool,
) -> List[str]:
    args = make_setuptools_shim_args(setup_py_path, no_user_config=no_user_config)

    args += ["egg_info"]

    if egg_info_dir:
        args += ["--egg-base", egg_info_dir]

    return args
260 venv/lib/python3.11/site-packages/pip/_internal/utils/subprocess.py Normal file
@ -0,0 +1,260 @@
import logging
import os
import shlex
import subprocess
from typing import (
    TYPE_CHECKING,
    Any,
    Callable,
    Iterable,
    List,
    Mapping,
    Optional,
    Union,
)

from pip._vendor.rich.markup import escape

from pip._internal.cli.spinners import SpinnerInterface, open_spinner
from pip._internal.exceptions import InstallationSubprocessError
from pip._internal.utils.logging import VERBOSE, subprocess_logger
from pip._internal.utils.misc import HiddenText

if TYPE_CHECKING:
    # Literal was introduced in Python 3.8.
    #
    # TODO: Remove `if TYPE_CHECKING` when dropping support for Python 3.7.
    from typing import Literal

CommandArgs = List[Union[str, HiddenText]]


def make_command(*args: Union[str, HiddenText, CommandArgs]) -> CommandArgs:
    """
    Create a CommandArgs object.
    """
    command_args: CommandArgs = []
    for arg in args:
        # Check for list instead of CommandArgs since CommandArgs is
        # only known during type-checking.
        if isinstance(arg, list):
            command_args.extend(arg)
        else:
            # Otherwise, arg is str or HiddenText.
            command_args.append(arg)

    return command_args


def format_command_args(args: Union[List[str], CommandArgs]) -> str:
    """
    Format command arguments for display.
    """
    # For HiddenText arguments, display the redacted form by calling str().
    # Also, we don't apply str() to arguments that aren't HiddenText since
    # this can trigger a UnicodeDecodeError in Python 2 if the argument
    # has type unicode and includes a non-ascii character. (The type
    # checker doesn't ensure the annotations are correct in all cases.)
    return " ".join(
        shlex.quote(str(arg)) if isinstance(arg, HiddenText) else shlex.quote(arg)
        for arg in args
    )


def reveal_command_args(args: Union[List[str], CommandArgs]) -> List[str]:
    """
    Return the arguments in their raw, unredacted form.
    """
    return [arg.secret if isinstance(arg, HiddenText) else arg for arg in args]
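
# Editor's illustration (not part of pip): how HiddenText values stay redacted
# when a command line is formatted for display, but are revealed for actual
# execution; hide_url comes from pip._internal.utils.misc:
#
#     >>> from pip._internal.utils.misc import hide_url
#     >>> cmd = make_command("git", "clone", hide_url("https://user:pass@host/repo"))
#     >>> format_command_args(cmd)          # '*' forces shlex.quote to quote
#     "git clone 'https://user:****@host/repo'"
#     >>> reveal_command_args(cmd)
#     ['git', 'clone', 'https://user:pass@host/repo']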

def call_subprocess(
    cmd: Union[List[str], CommandArgs],
    show_stdout: bool = False,
    cwd: Optional[str] = None,
    on_returncode: 'Literal["raise", "warn", "ignore"]' = "raise",
    extra_ok_returncodes: Optional[Iterable[int]] = None,
    extra_environ: Optional[Mapping[str, Any]] = None,
    unset_environ: Optional[Iterable[str]] = None,
    spinner: Optional[SpinnerInterface] = None,
    log_failed_cmd: Optional[bool] = True,
    stdout_only: Optional[bool] = False,
    *,
    command_desc: str,
) -> str:
    """
    Args:
      show_stdout: if true, use INFO to log the subprocess's stderr and
        stdout streams. Otherwise, use DEBUG. Defaults to False.
      extra_ok_returncodes: an iterable of integer return codes that are
        acceptable, in addition to 0. Defaults to None, which means [].
      unset_environ: an iterable of environment variable names to unset
        prior to calling subprocess.Popen().
      log_failed_cmd: if false, failed commands are not logged, only raised.
      stdout_only: if true, return only stdout, else return both. When true,
        logging of both stdout and stderr occurs when the subprocess has
        terminated, else logging occurs as subprocess output is produced.
    """
    if extra_ok_returncodes is None:
        extra_ok_returncodes = []
    if unset_environ is None:
        unset_environ = []
    # Most places in pip use show_stdout=False. What this means is--
    #
    # - We connect the child's output (combined stderr and stdout) to a
    #   single pipe, which we read.
    # - We log this output to stderr at DEBUG level as it is received.
    # - If DEBUG logging isn't enabled (e.g. if --verbose logging wasn't
    #   requested), then we show a spinner so the user can still see the
    #   subprocess is in progress.
    # - If the subprocess exits with an error, we log the output to stderr
    #   at ERROR level if it hasn't already been displayed to the console
    #   (e.g. if --verbose logging wasn't enabled). This way we don't log
    #   the output to the console twice.
    #
    # If show_stdout=True, then the above is still done, but with DEBUG
    # replaced by INFO.
    if show_stdout:
        # Then log the subprocess output at INFO level.
        log_subprocess: Callable[..., None] = subprocess_logger.info
        used_level = logging.INFO
    else:
        # Then log the subprocess output using VERBOSE. This also ensures
        # it will be logged to the log file (aka user_log), if enabled.
        log_subprocess = subprocess_logger.verbose
        used_level = VERBOSE

    # Whether the subprocess will be visible in the console.
    showing_subprocess = subprocess_logger.getEffectiveLevel() <= used_level

    # Only use the spinner if we're not showing the subprocess output
    # and we have a spinner.
    use_spinner = not showing_subprocess and spinner is not None

    log_subprocess("Running command %s", command_desc)
    env = os.environ.copy()
    if extra_environ:
        env.update(extra_environ)
    for name in unset_environ:
        env.pop(name, None)
    try:
        proc = subprocess.Popen(
            # Convert HiddenText objects to the underlying str.
            reveal_command_args(cmd),
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT if not stdout_only else subprocess.PIPE,
            cwd=cwd,
            env=env,
            errors="backslashreplace",
        )
    except Exception as exc:
        if log_failed_cmd:
            subprocess_logger.critical(
                "Error %s while executing command %s",
                exc,
                command_desc,
            )
        raise
    all_output = []
    if not stdout_only:
        assert proc.stdout
        assert proc.stdin
        proc.stdin.close()
        # In this mode, stdout and stderr are in the same pipe.
        while True:
            line: str = proc.stdout.readline()
            if not line:
                break
            line = line.rstrip()
            all_output.append(line + "\n")

            # Show the line immediately.
            log_subprocess(line)
            # Update the spinner.
            if use_spinner:
                assert spinner
                spinner.spin()
        try:
            proc.wait()
        finally:
            if proc.stdout:
                proc.stdout.close()
        output = "".join(all_output)
    else:
        # In this mode, stdout and stderr are in different pipes.
        # We must use communicate() which is the only safe way to read both.
        out, err = proc.communicate()
        # log line by line to preserve pip log indenting
        for out_line in out.splitlines():
            log_subprocess(out_line)
        all_output.append(out)
        for err_line in err.splitlines():
            log_subprocess(err_line)
        all_output.append(err)
        output = out

    proc_had_error = proc.returncode and proc.returncode not in extra_ok_returncodes
    if use_spinner:
        assert spinner
        if proc_had_error:
            spinner.finish("error")
        else:
            spinner.finish("done")
    if proc_had_error:
        if on_returncode == "raise":
            error = InstallationSubprocessError(
                command_description=command_desc,
                exit_code=proc.returncode,
                output_lines=all_output if not showing_subprocess else None,
            )
            if log_failed_cmd:
                subprocess_logger.error("[present-rich] %s", error)
                subprocess_logger.verbose(
                    "[bold magenta]full command[/]: [blue]%s[/]",
                    escape(format_command_args(cmd)),
                    extra={"markup": True},
                )
                subprocess_logger.verbose(
                    "[bold magenta]cwd[/]: %s",
                    escape(cwd or "[inherit]"),
                    extra={"markup": True},
                )

            raise error
        elif on_returncode == "warn":
            subprocess_logger.warning(
                'Command "%s" had error code %s in %s',
                command_desc,
                proc.returncode,
                cwd,
            )
        elif on_returncode == "ignore":
            pass
        else:
            raise ValueError(f"Invalid value: on_returncode={on_returncode!r}")
    return output


def runner_with_spinner_message(message: str) -> Callable[..., None]:
    """Provide a subprocess_runner that shows a spinner message.

    Intended for use with BuildBackendHookCaller. Thus, the runner has
    an API that matches what's expected by BuildBackendHookCaller.subprocess_runner.
    """

    def runner(
        cmd: List[str],
        cwd: Optional[str] = None,
        extra_environ: Optional[Mapping[str, Any]] = None,
    ) -> None:
        with open_spinner(message) as spinner:
            call_subprocess(
                cmd,
                command_desc=message,
                cwd=cwd,
                extra_environ=extra_environ,
                spinner=spinner,
            )

    return runner
246 venv/lib/python3.11/site-packages/pip/_internal/utils/temp_dir.py Normal file
@ -0,0 +1,246 @@
import errno
import itertools
import logging
import os.path
import tempfile
from contextlib import ExitStack, contextmanager
from typing import Any, Dict, Generator, Optional, TypeVar, Union

from pip._internal.utils.misc import enum, rmtree

logger = logging.getLogger(__name__)

_T = TypeVar("_T", bound="TempDirectory")


# Kinds of temporary directories. Only needed for ones that are
# globally-managed.
tempdir_kinds = enum(
    BUILD_ENV="build-env",
    EPHEM_WHEEL_CACHE="ephem-wheel-cache",
    REQ_BUILD="req-build",
)


_tempdir_manager: Optional[ExitStack] = None


@contextmanager
def global_tempdir_manager() -> Generator[None, None, None]:
    global _tempdir_manager
    with ExitStack() as stack:
        old_tempdir_manager, _tempdir_manager = _tempdir_manager, stack
        try:
            yield
        finally:
            _tempdir_manager = old_tempdir_manager


class TempDirectoryTypeRegistry:
    """Manages temp directory behavior"""

    def __init__(self) -> None:
        self._should_delete: Dict[str, bool] = {}

    def set_delete(self, kind: str, value: bool) -> None:
        """Indicate whether a TempDirectory of the given kind should be
        auto-deleted.
        """
        self._should_delete[kind] = value

    def get_delete(self, kind: str) -> bool:
        """Get configured auto-delete flag for a given TempDirectory type,
        default True.
        """
        return self._should_delete.get(kind, True)


_tempdir_registry: Optional[TempDirectoryTypeRegistry] = None


@contextmanager
def tempdir_registry() -> Generator[TempDirectoryTypeRegistry, None, None]:
    """Provides a scoped global tempdir registry that can be used to dictate
    whether directories should be deleted.
    """
    global _tempdir_registry
    old_tempdir_registry = _tempdir_registry
    _tempdir_registry = TempDirectoryTypeRegistry()
    try:
        yield _tempdir_registry
    finally:
        _tempdir_registry = old_tempdir_registry


class _Default:
    pass


_default = _Default()


class TempDirectory:
    """Helper class that owns and cleans up a temporary directory.

    This class can be used as a context manager or as an OO representation of a
    temporary directory.

    Attributes:
        path
            Location to the created temporary directory
        delete
            Whether the directory should be deleted when exiting
            (when used as a contextmanager)

    Methods:
        cleanup()
            Deletes the temporary directory

    When used as a context manager, if the delete attribute is True, on
    exiting the context the temporary directory is deleted.
    """

    def __init__(
        self,
        path: Optional[str] = None,
        delete: Union[bool, None, _Default] = _default,
        kind: str = "temp",
        globally_managed: bool = False,
    ):
        super().__init__()

        if delete is _default:
            if path is not None:
                # If we were given an explicit directory, resolve delete option
                # now.
                delete = False
            else:
                # Otherwise, we wait until cleanup and see what
                # tempdir_registry says.
                delete = None

        # The only time we specify path is for editables, where it
        # is the value of the --src option.
        if path is None:
            path = self._create(kind)

        self._path = path
        self._deleted = False
        self.delete = delete
        self.kind = kind

        if globally_managed:
            assert _tempdir_manager is not None
            _tempdir_manager.enter_context(self)

    @property
    def path(self) -> str:
        assert not self._deleted, f"Attempted to access deleted path: {self._path}"
        return self._path

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__} {self.path!r}>"

    def __enter__(self: _T) -> _T:
        return self

    def __exit__(self, exc: Any, value: Any, tb: Any) -> None:
        if self.delete is not None:
            delete = self.delete
        elif _tempdir_registry:
            delete = _tempdir_registry.get_delete(self.kind)
        else:
            delete = True

        if delete:
            self.cleanup()

    def _create(self, kind: str) -> str:
        """Create a temporary directory and store its path in self.path"""
        # We realpath here because some systems have their default tmpdir
        # symlinked to another directory. This tends to confuse build
        # scripts, so we canonicalize the path by traversing potential
        # symlinks here.
        path = os.path.realpath(tempfile.mkdtemp(prefix=f"pip-{kind}-"))
        logger.debug("Created temporary directory: %s", path)
        return path

    def cleanup(self) -> None:
        """Remove the temporary directory created and reset state"""
        self._deleted = True
        if not os.path.exists(self._path):
            return
        rmtree(self._path)


class AdjacentTempDirectory(TempDirectory):
    """Helper class that creates a temporary directory adjacent to a real one.

    Attributes:
        original
            The original directory to create a temp directory for.
        path
            After calling create() or entering, contains the full
            path to the temporary directory.
        delete
            Whether the directory should be deleted when exiting
            (when used as a contextmanager)

    """

    # The characters that may be used to name the temp directory
    # We always prepend a ~ and then rotate through these until
    # a usable name is found.
    # pkg_resources raises a different error for .dist-info folder
    # with leading '-' and invalid metadata
    LEADING_CHARS = "-~.=%0123456789"

    def __init__(self, original: str, delete: Optional[bool] = None) -> None:
        self.original = original.rstrip("/\\")
        super().__init__(delete=delete)

    @classmethod
    def _generate_names(cls, name: str) -> Generator[str, None, None]:
        """Generates a series of temporary names.

        The algorithm replaces the leading characters in the name
        with ones that are valid filesystem characters, but are not
        valid package names (for both Python and pip definitions of
        package).
        """
        for i in range(1, len(name)):
            for candidate in itertools.combinations_with_replacement(
                cls.LEADING_CHARS, i - 1
            ):
                new_name = "~" + "".join(candidate) + name[i:]
                if new_name != name:
                    yield new_name

        # If we make it this far, we will have to make a longer name
        for i in range(len(cls.LEADING_CHARS)):
            for candidate in itertools.combinations_with_replacement(
                cls.LEADING_CHARS, i
            ):
                new_name = "~" + "".join(candidate) + name
                if new_name != name:
                    yield new_name

    def _create(self, kind: str) -> str:
        root, name = os.path.split(self.original)
        for candidate in self._generate_names(name):
            path = os.path.join(root, candidate)
            try:
                os.mkdir(path)
            except OSError as ex:
                # Continue if the name exists already
                if ex.errno != errno.EEXIST:
                    raise
            else:
                path = os.path.realpath(path)
                break
        else:
            # Final fallback on the default behavior.
            path = os.path.realpath(tempfile.mkdtemp(prefix=f"pip-{kind}-"))

        logger.debug("Created temporary directory: %s", path)
        return path
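
# Editor's sketch (not part of pip): typical context-manager use of
# TempDirectory; the directory is created eagerly and removed on exit unless
# delete resolves to False via the tempdir registry:
#
#     with TempDirectory(kind="example") as tmp_dir:
#         marker = os.path.join(tmp_dir.path, "marker.txt")
#         ...  # work inside tmp_dir.path
#     # the directory is deleted here; tmp_dir.path now raises AssertionError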
257 venv/lib/python3.11/site-packages/pip/_internal/utils/unpacking.py Normal file
@ -0,0 +1,257 @@
"""Utilities related to archives.
"""

import logging
import os
import shutil
import stat
import tarfile
import zipfile
from typing import Iterable, List, Optional
from zipfile import ZipInfo

from pip._internal.exceptions import InstallationError
from pip._internal.utils.filetypes import (
    BZ2_EXTENSIONS,
    TAR_EXTENSIONS,
    XZ_EXTENSIONS,
    ZIP_EXTENSIONS,
)
from pip._internal.utils.misc import ensure_dir

logger = logging.getLogger(__name__)


SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS

try:
    import bz2  # noqa

    SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS
except ImportError:
    logger.debug("bz2 module is not available")

try:
    # Only for Python 3.3+
    import lzma  # noqa

    SUPPORTED_EXTENSIONS += XZ_EXTENSIONS
except ImportError:
    logger.debug("lzma module is not available")


def current_umask() -> int:
    """Get the current umask which involves having to set it temporarily."""
    mask = os.umask(0)
    os.umask(mask)
    return mask


def split_leading_dir(path: str) -> List[str]:
    path = path.lstrip("/").lstrip("\\")
    if "/" in path and (
        ("\\" in path and path.find("/") < path.find("\\")) or "\\" not in path
    ):
        return path.split("/", 1)
    elif "\\" in path:
        return path.split("\\", 1)
    else:
        return [path, ""]


def has_leading_dir(paths: Iterable[str]) -> bool:
    """Returns true if all the paths have the same leading path name
    (i.e., everything is in one subdirectory in an archive)"""
    common_prefix = None
    for path in paths:
        prefix, rest = split_leading_dir(path)
        if not prefix:
            return False
        elif common_prefix is None:
            common_prefix = prefix
        elif prefix != common_prefix:
            return False
    return True
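
# Editor's illustration (not part of pip): doctest-style behavior of the two
# path helpers above on forward-slash archive member names:
#
#     >>> split_leading_dir("pkg/sub/file.py")
#     ['pkg', 'sub/file.py']
#     >>> has_leading_dir(["pkg/a.py", "pkg/b.py"])
#     True
#     >>> has_leading_dir(["pkg/a.py", "other/b.py"])
#     False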
def is_within_directory(directory: str, target: str) -> bool:
|
||||
"""
|
||||
Return true if the absolute path of target is within the directory
|
||||
"""
|
||||
abs_directory = os.path.abspath(directory)
|
||||
abs_target = os.path.abspath(target)
|
||||
|
||||
prefix = os.path.commonprefix([abs_directory, abs_target])
|
||||
return prefix == abs_directory
|
||||
|
||||
|
||||
def set_extracted_file_to_default_mode_plus_executable(path: str) -> None:
|
||||
"""
|
||||
Make file present at path have execute for user/group/world
|
||||
(chmod +x) is no-op on windows per python docs
|
||||
"""
|
||||
os.chmod(path, (0o777 & ~current_umask() | 0o111))
|
||||
|
||||
|
||||
def zip_item_is_executable(info: ZipInfo) -> bool:
|
||||
mode = info.external_attr >> 16
|
||||
# if mode and regular file and any execute permissions for
|
||||
# user/group/world?
|
||||
return bool(mode and stat.S_ISREG(mode) and mode & 0o111)
|
||||
|
||||
|
||||
def unzip_file(filename: str, location: str, flatten: bool = True) -> None:
|
||||
"""
|
||||
Unzip the file (with path `filename`) to the destination `location`. All
|
||||
files are written based on system defaults and umask (i.e. permissions are
|
||||
not preserved), except that regular file members with any execute
|
||||
permissions (user, group, or world) have "chmod +x" applied after being
|
||||
written. Note that for windows, any execute changes using os.chmod are
|
||||
no-ops per the python docs.
|
||||
"""
|
||||
ensure_dir(location)
|
||||
zipfp = open(filename, "rb")
|
||||
try:
|
||||
zip = zipfile.ZipFile(zipfp, allowZip64=True)
|
||||
leading = has_leading_dir(zip.namelist()) and flatten
|
||||
for info in zip.infolist():
|
||||
name = info.filename
|
||||
fn = name
|
||||
if leading:
|
||||
fn = split_leading_dir(name)[1]
|
||||
fn = os.path.join(location, fn)
|
||||
dir = os.path.dirname(fn)
|
||||
if not is_within_directory(location, fn):
|
||||
message = (
|
||||
"The zip file ({}) has a file ({}) trying to install "
|
||||
"outside target directory ({})"
|
||||
)
|
||||
raise InstallationError(message.format(filename, fn, location))
|
||||
if fn.endswith("/") or fn.endswith("\\"):
|
||||
# A directory
|
||||
ensure_dir(fn)
|
||||
else:
|
||||
ensure_dir(dir)
|
||||
# Don't use read() to avoid allocating an arbitrarily large
|
||||
# chunk of memory for the file's content
|
||||
fp = zip.open(name)
|
||||
try:
|
||||
with open(fn, "wb") as destfp:
|
||||
shutil.copyfileobj(fp, destfp)
|
||||
finally:
|
||||
fp.close()
|
||||
if zip_item_is_executable(info):
|
||||
set_extracted_file_to_default_mode_plus_executable(fn)
|
||||
finally:
|
||||
zipfp.close()
|
||||
|
||||
|
||||
def untar_file(filename: str, location: str) -> None:
    """
    Untar the file (with path `filename`) to the destination `location`.
    All files are written based on system defaults and umask (i.e. permissions
    are not preserved), except that regular file members with any execute
    permissions (user, group, or world) have "chmod +x" applied after being
    written. Note that on Windows, any execute changes using os.chmod are
    no-ops per the Python docs.
    """
    ensure_dir(location)
    if filename.lower().endswith(".gz") or filename.lower().endswith(".tgz"):
        mode = "r:gz"
    elif filename.lower().endswith(BZ2_EXTENSIONS):
        mode = "r:bz2"
    elif filename.lower().endswith(XZ_EXTENSIONS):
        mode = "r:xz"
    elif filename.lower().endswith(".tar"):
        mode = "r"
    else:
        logger.warning(
            "Cannot determine compression type for file %s",
            filename,
        )
        mode = "r:*"
    tar = tarfile.open(filename, mode, encoding="utf-8")
    try:
        leading = has_leading_dir([member.name for member in tar.getmembers()])
        for member in tar.getmembers():
            fn = member.name
            if leading:
                fn = split_leading_dir(fn)[1]
            path = os.path.join(location, fn)
            if not is_within_directory(location, path):
                message = (
                    "The tar file ({}) has a file ({}) trying to install "
                    "outside target directory ({})"
                )
                raise InstallationError(message.format(filename, path, location))
            if member.isdir():
                ensure_dir(path)
            elif member.issym():
                try:
                    # tarfile's private extraction helper; used here to
                    # materialize symlink members.
                    tar._extract_member(member, path)
                except Exception as exc:
                    # Some corrupt tar files seem to produce this
                    # (specifically bad symlinks)
                    logger.warning(
                        "In the tar file %s the member %s is invalid: %s",
                        filename,
                        member.name,
                        exc,
                    )
                    continue
            else:
                try:
                    fp = tar.extractfile(member)
                except (KeyError, AttributeError) as exc:
                    # Some corrupt tar files seem to produce this
                    # (specifically bad symlinks)
                    logger.warning(
                        "In the tar file %s the member %s is invalid: %s",
                        filename,
                        member.name,
                        exc,
                    )
                    continue
                ensure_dir(os.path.dirname(path))
                assert fp is not None
                with open(path, "wb") as destfp:
                    shutil.copyfileobj(fp, destfp)
                fp.close()
                # Update the timestamp (useful for cython compiled files)
                tar.utime(member, path)
                # Does the member have any execute permissions for
                # user/group/world?
                if member.mode & 0o111:
                    set_extracted_file_to_default_mode_plus_executable(path)
    finally:
        tar.close()

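# Illustrative sketch, not part of the vendored module: the extension dispatch
# above, assuming pip's usual values of BZ2_EXTENSIONS = (".tar.bz2", ".tbz")
# and XZ_EXTENSIONS = (".tar.xz", ".txz", ".tlz", ".tar.lz", ".tar.lzma").
# Anything unrecognized falls back to "r:*", letting tarfile auto-detect.
#
#     >>> "pkg-1.0.TAR.GZ".lower().endswith((".gz", ".tgz"))
#     True
#     >>> "pkg-1.0.tbz".lower().endswith((".tar.bz2", ".tbz"))
#     True
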
def unpack_file(
    filename: str,
    location: str,
    content_type: Optional[str] = None,
) -> None:
    filename = os.path.realpath(filename)
    if (
        content_type == "application/zip"
        or filename.lower().endswith(ZIP_EXTENSIONS)
        or zipfile.is_zipfile(filename)
    ):
        unzip_file(filename, location, flatten=not filename.endswith(".whl"))
    elif (
        content_type == "application/x-gzip"
        or tarfile.is_tarfile(filename)
        or filename.lower().endswith(TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS)
    ):
        untar_file(filename, location)
    else:
        # FIXME: handle?
        # FIXME: magic signatures?
        logger.critical(
            "Cannot unpack file %s (downloaded from %s, content-type: %s); "
            "cannot detect archive format",
            filename,
            location,
            content_type,
        )
        raise InstallationError(f"Cannot determine archive format of {location}")
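
# Illustrative sketch, not part of the vendored module: unpack_file dispatches
# on content type, extension, and file signature; wheels are zip archives and
# are deliberately not flattened. Filenames here are hypothetical.
#
#     >>> unpack_file("downloads/requests-2.31.0-py3-none-any.whl",
#     ...             "build/requests")  # doctest: +SKIP
#     >>> unpack_file("downloads/sdist-1.0.tar.gz",
#     ...             "build/sdist")  # doctest: +SKIP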
venv/lib/python3.11/site-packages/pip/_internal/utils/urls.py (new file, 62 lines)
@@ -0,0 +1,62 @@
import os
import string
import urllib.parse
import urllib.request
from typing import Optional

from .compat import WINDOWS


def get_url_scheme(url: str) -> Optional[str]:
    if ":" not in url:
        return None
    return url.split(":", 1)[0].lower()

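# Illustrative sketch, not part of the vendored module: scheme extraction is
# purely textual, so a Windows drive letter also parses as a "scheme".
#
#     >>> get_url_scheme("https://example.com/pkg.tar.gz")
#     'https'
#     >>> get_url_scheme("c:/tmp/pkg.tar.gz")
#     'c'
#     >>> get_url_scheme("no-scheme-here") is None
#     True
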
def path_to_url(path: str) -> str:
    """
    Convert a path to a file: URL. The path will be made absolute and have
    quoted path parts.
    """
    path = os.path.normpath(os.path.abspath(path))
    url = urllib.parse.urljoin("file:", urllib.request.pathname2url(path))
    return url

def url_to_path(url: str) -> str:
    """
    Convert a file: URL to a path.
    """
    assert url.startswith(
        "file:"
    ), f"You can only turn file: urls into filenames (not {url!r})"

    _, netloc, path, _, _ = urllib.parse.urlsplit(url)

    if not netloc or netloc == "localhost":
        # According to RFC 8089, same as empty authority.
        netloc = ""
    elif WINDOWS:
        # If we have a UNC path, prepend UNC share notation.
        netloc = "\\\\" + netloc
    else:
        raise ValueError(
            f"non-local file URIs are not supported on this platform: {url!r}"
        )

    path = urllib.request.url2pathname(netloc + path)

    # On Windows, urlsplit parses the path as something like "/C:/Users/foo".
    # This creates issues for path-related functions like io.open(), so we try
    # to detect and strip the leading slash.
    if (
        WINDOWS
        and not netloc  # Not UNC.
        and len(path) >= 3
        and path[0] == "/"  # Leading slash to strip.
        and path[1] in string.ascii_letters  # Drive letter.
        and path[2:4] in (":", ":/")  # Colon + end of string, or colon + absolute path.
    ):
        path = path[1:]

    return path
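
# Illustrative sketch, not part of the vendored module: round-tripping on
# POSIX. pathname2url percent-encodes special characters on the way out and
# url2pathname decodes them on the way back.
#
#     >>> path_to_url("/tmp/some pkg")
#     'file:///tmp/some%20pkg'
#     >>> url_to_path("file:///tmp/some%20pkg")
#     '/tmp/some pkg'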
venv/lib/python3.11/site-packages/pip/_internal/utils/virtualenv.py (new file, 104 lines)
@@ -0,0 +1,104 @@
import logging
import os
import re
import site
import sys
from typing import List, Optional

logger = logging.getLogger(__name__)
_INCLUDE_SYSTEM_SITE_PACKAGES_REGEX = re.compile(
    r"include-system-site-packages\s*=\s*(?P<value>true|false)"
)


def _running_under_venv() -> bool:
    """Checks if sys.base_prefix and sys.prefix match.

    This handles PEP 405 compliant virtual environments.
    """
    return sys.prefix != getattr(sys, "base_prefix", sys.prefix)


def _running_under_legacy_virtualenv() -> bool:
    """Checks if sys.real_prefix is set.

    This handles virtual environments created with pypa's virtualenv.
    """
    # pypa/virtualenv case
    return hasattr(sys, "real_prefix")


def running_under_virtualenv() -> bool:
    """True if we're running inside a virtual environment, False otherwise."""
    return _running_under_venv() or _running_under_legacy_virtualenv()

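# Illustrative sketch, not part of the vendored module: inside a PEP 405 venv,
# sys.prefix points at the environment while sys.base_prefix points at the
# interpreter the venv was created from, so the two differ.
#
#     >>> import sys
#     >>> sys.prefix != getattr(sys, "base_prefix", sys.prefix)  # doctest: +SKIP
#     True
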
def _get_pyvenv_cfg_lines() -> Optional[List[str]]:
    """Reads {sys.prefix}/pyvenv.cfg and returns its contents as a list of lines

    Returns None if it could not read/access the file.
    """
    pyvenv_cfg_file = os.path.join(sys.prefix, "pyvenv.cfg")
    try:
        # Although PEP 405 does not specify, the built-in venv module always
        # writes with UTF-8. (pypa/pip#8717)
        with open(pyvenv_cfg_file, encoding="utf-8") as f:
            return f.read().splitlines()  # avoids trailing newlines
    except OSError:
        return None


def _no_global_under_venv() -> bool:
    """Check `{sys.prefix}/pyvenv.cfg` for system site-packages inclusion

    PEP 405 specifies that when system site-packages are not supposed to be
    visible from a virtual environment, `pyvenv.cfg` must contain the following
    line:

        include-system-site-packages = false

    Additionally, log a warning if accessing the file fails.
    """
    cfg_lines = _get_pyvenv_cfg_lines()
    if cfg_lines is None:
        # We're not in a "sane" venv, so assume there is no system
        # site-packages access (since that's PEP 405's default state).
        logger.warning(
            "Could not access 'pyvenv.cfg' despite a virtual environment "
            "being active. Assuming global site-packages is not accessible "
            "in this environment."
        )
        return True

    for line in cfg_lines:
        match = _INCLUDE_SYSTEM_SITE_PACKAGES_REGEX.match(line)
        if match is not None and match.group("value") == "false":
            return True
    return False

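# Illustrative sketch, not part of the vendored module: a typical pyvenv.cfg
# written by `python -m venv` (values hypothetical); the regex above matches
# the middle line.
#
#     home = /usr/bin
#     include-system-site-packages = false
#     version = 3.11.4
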
def _no_global_under_legacy_virtualenv() -> bool:
    """Check if "no-global-site-packages.txt" exists beside site.py

    This mirrors logic in pypa/virtualenv for determining whether system
    site-packages are visible in the virtual environment.
    """
    site_mod_dir = os.path.dirname(os.path.abspath(site.__file__))
    no_global_site_packages_file = os.path.join(
        site_mod_dir,
        "no-global-site-packages.txt",
    )
    return os.path.exists(no_global_site_packages_file)


def virtualenv_no_global() -> bool:
    """Returns a boolean, whether running in venv with no system site-packages."""
    # PEP 405 compliance needs to be checked first since virtualenv >=20 would
    # return True for both checks, but is only able to use the PEP 405 config.
    if _running_under_venv():
        return _no_global_under_venv()

    if _running_under_legacy_virtualenv():
        return _no_global_under_legacy_virtualenv()

    return False
venv/lib/python3.11/site-packages/pip/_internal/utils/wheel.py (new file, 136 lines)
@@ -0,0 +1,136 @@
"""Support functions for working with wheel files.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from email.message import Message
|
||||
from email.parser import Parser
|
||||
from typing import Tuple
|
||||
from zipfile import BadZipFile, ZipFile
|
||||
|
||||
from pip._vendor.packaging.utils import canonicalize_name
|
||||
|
||||
from pip._internal.exceptions import UnsupportedWheel
|
||||
|
||||
VERSION_COMPATIBLE = (1, 0)
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def parse_wheel(wheel_zip: ZipFile, name: str) -> Tuple[str, Message]:
|
||||
"""Extract information from the provided wheel, ensuring it meets basic
|
||||
standards.
|
||||
|
||||
Returns the name of the .dist-info directory and the parsed WHEEL metadata.
|
||||
"""
|
||||
try:
|
||||
info_dir = wheel_dist_info_dir(wheel_zip, name)
|
||||
metadata = wheel_metadata(wheel_zip, info_dir)
|
||||
version = wheel_version(metadata)
|
||||
except UnsupportedWheel as e:
|
||||
raise UnsupportedWheel("{} has an invalid wheel, {}".format(name, str(e)))
|
||||
|
||||
check_compatibility(version, name)
|
||||
|
||||
return info_dir, metadata
|
||||
|
||||
|
||||
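# Illustrative sketch, not part of the vendored module: the typical call
# pattern. The wheel filename is hypothetical.
#
#     >>> from zipfile import ZipFile
#     >>> with ZipFile("requests-2.31.0-py3-none-any.whl") as wheel_zip:  # doctest: +SKIP
#     ...     info_dir, metadata = parse_wheel(wheel_zip, "requests")
#     ...     metadata["Wheel-Version"]
#     '1.0'
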
def wheel_dist_info_dir(source: ZipFile, name: str) -> str:
    """Returns the name of the contained .dist-info directory.

    Raises AssertionError or UnsupportedWheel if not found, >1 found, or
    it doesn't match the provided name.
    """
    # Zip file path separators must be /
    subdirs = {p.split("/", 1)[0] for p in source.namelist()}

    info_dirs = [s for s in subdirs if s.endswith(".dist-info")]

    if not info_dirs:
        raise UnsupportedWheel(".dist-info directory not found")

    if len(info_dirs) > 1:
        raise UnsupportedWheel(
            "multiple .dist-info directories found: {}".format(", ".join(info_dirs))
        )

    info_dir = info_dirs[0]

    info_dir_name = canonicalize_name(info_dir)
    canonical_name = canonicalize_name(name)
    if not info_dir_name.startswith(canonical_name):
        raise UnsupportedWheel(
            ".dist-info directory {!r} does not start with {!r}".format(
                info_dir, canonical_name
            )
        )

    return info_dir

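# Illustrative sketch, not part of the vendored module: canonicalize_name
# lower-cases and collapses runs of "-", "_", "." into "-", so the directory
# comparison above tolerates naming variations.
#
#     >>> canonicalize_name("My_Package.Name")
#     'my-package-name'
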
def read_wheel_metadata_file(source: ZipFile, path: str) -> bytes:
    try:
        return source.read(path)
        # BadZipFile for general corruption, KeyError for missing entry,
        # and RuntimeError for password-protected files
    except (BadZipFile, KeyError, RuntimeError) as e:
        raise UnsupportedWheel(f"could not read {path!r} file: {e!r}")


def wheel_metadata(source: ZipFile, dist_info_dir: str) -> Message:
    """Return the WHEEL metadata of an extracted wheel, if possible.
    Otherwise, raise UnsupportedWheel.
    """
    path = f"{dist_info_dir}/WHEEL"
    # Zip file path separators must be /
    wheel_contents = read_wheel_metadata_file(source, path)

    try:
        wheel_text = wheel_contents.decode()
    except UnicodeDecodeError as e:
        raise UnsupportedWheel(f"error decoding {path!r}: {e!r}")

    # FeedParser (used by Parser) does not raise any exceptions. The returned
    # message may have .defects populated, but for backwards-compatibility we
    # currently ignore them.
    return Parser().parsestr(wheel_text)


def wheel_version(wheel_data: Message) -> Tuple[int, ...]:
    """Given WHEEL metadata, return the parsed Wheel-Version.
    Otherwise, raise UnsupportedWheel.
    """
    version_text = wheel_data["Wheel-Version"]
    if version_text is None:
        raise UnsupportedWheel("WHEEL is missing Wheel-Version")

    version = version_text.strip()

    try:
        return tuple(map(int, version.split(".")))
    except ValueError:
        raise UnsupportedWheel(f"invalid Wheel-Version: {version!r}")

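# Illustrative sketch, not part of the vendored module: parsing a WHEEL
# file's Wheel-Version header into a comparable tuple.
#
#     >>> from email.parser import Parser
#     >>> msg = Parser().parsestr("Wheel-Version: 1.0\nRoot-Is-Purelib: true\n")
#     >>> wheel_version(msg)
#     (1, 0)
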
def check_compatibility(version: Tuple[int, ...], name: str) -> None:
    """Raises errors or warns if called with an incompatible Wheel-Version.

    pip should refuse to install a Wheel-Version that's a major series
    ahead of what it's compatible with (e.g. 2.0 > 1.1); and warn when
    installing a version only a minor version ahead (e.g. 1.2 > 1.1).

    version: a 2-tuple representing a Wheel-Version (Major, Minor)
    name: name of wheel or package to raise exception about

    :raises UnsupportedWheel: when an incompatible Wheel-Version is given
    """
    if version[0] > VERSION_COMPATIBLE[0]:
        raise UnsupportedWheel(
            "{}'s Wheel-Version ({}) is not compatible with this version "
            "of pip".format(name, ".".join(map(str, version)))
        )
    elif version > VERSION_COMPATIBLE:
        logger.warning(
            "Installing from a newer Wheel-Version (%s)",
            ".".join(map(str, version)),
        )
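
# Illustrative sketch, not part of the vendored module: tuple comparison
# drives the policy, with VERSION_COMPATIBLE = (1, 0):
#   check_compatibility((1, 0), "demo")  -> returns silently (exact match)
#   check_compatibility((1, 9), "demo")  -> logs a warning (newer minor)
#   check_compatibility((2, 0), "demo")  -> raises UnsupportedWheel (newer major)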