first commit
This commit is contained in:
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
109
venv/Lib/site-packages/pip/_internal/utils/_jaraco_text.py
Normal file
109
venv/Lib/site-packages/pip/_internal/utils/_jaraco_text.py
Normal file
@@ -0,0 +1,109 @@
|
||||
"""Functions brought over from jaraco.text.
|
||||
|
||||
These functions are not supposed to be used within `pip._internal`. These are
|
||||
helper functions brought over from `jaraco.text` to enable vendoring newer
|
||||
copies of `pkg_resources` without having to vendor `jaraco.text` and its entire
|
||||
dependency cone; something that our vendoring setup is not currently capable of
|
||||
handling.
|
||||
|
||||
License reproduced from original source below:
|
||||
|
||||
Copyright Jason R. Coombs
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to
|
||||
deal in the Software without restriction, including without limitation the
|
||||
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
sell copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
IN THE SOFTWARE.
|
||||
"""
|
||||
|
||||
import functools
|
||||
import itertools
|
||||
|
||||
|
||||
def _nonblank(str):
|
||||
return str and not str.startswith("#")
|
||||
|
||||
|
||||
@functools.singledispatch
|
||||
def yield_lines(iterable):
|
||||
r"""
|
||||
Yield valid lines of a string or iterable.
|
||||
|
||||
>>> list(yield_lines(''))
|
||||
[]
|
||||
>>> list(yield_lines(['foo', 'bar']))
|
||||
['foo', 'bar']
|
||||
>>> list(yield_lines('foo\nbar'))
|
||||
['foo', 'bar']
|
||||
>>> list(yield_lines('\nfoo\n#bar\nbaz #comment'))
|
||||
['foo', 'baz #comment']
|
||||
>>> list(yield_lines(['foo\nbar', 'baz', 'bing\n\n\n']))
|
||||
['foo', 'bar', 'baz', 'bing']
|
||||
"""
|
||||
return itertools.chain.from_iterable(map(yield_lines, iterable))
|
||||
|
||||
|
||||
@yield_lines.register(str)
|
||||
def _(text):
|
||||
return filter(_nonblank, map(str.strip, text.splitlines()))
|
||||
|
||||
|
||||
def drop_comment(line):
|
||||
"""
|
||||
Drop comments.
|
||||
|
||||
>>> drop_comment('foo # bar')
|
||||
'foo'
|
||||
|
||||
A hash without a space may be in a URL.
|
||||
|
||||
>>> drop_comment('http://example.com/foo#bar')
|
||||
'http://example.com/foo#bar'
|
||||
"""
|
||||
return line.partition(" #")[0]
|
||||
|
||||
|
||||
def join_continuation(lines):
|
||||
r"""
|
||||
Join lines continued by a trailing backslash.
|
||||
|
||||
>>> list(join_continuation(['foo \\', 'bar', 'baz']))
|
||||
['foobar', 'baz']
|
||||
>>> list(join_continuation(['foo \\', 'bar', 'baz']))
|
||||
['foobar', 'baz']
|
||||
>>> list(join_continuation(['foo \\', 'bar \\', 'baz']))
|
||||
['foobarbaz']
|
||||
|
||||
Not sure why, but...
|
||||
The character preceeding the backslash is also elided.
|
||||
|
||||
>>> list(join_continuation(['goo\\', 'dly']))
|
||||
['godly']
|
||||
|
||||
A terrible idea, but...
|
||||
If no line is available to continue, suppress the lines.
|
||||
|
||||
>>> list(join_continuation(['foo', 'bar\\', 'baz\\']))
|
||||
['foo']
|
||||
"""
|
||||
lines = iter(lines)
|
||||
for item in lines:
|
||||
while item.endswith("\\"):
|
||||
try:
|
||||
item = item[:-2].strip() + next(lines)
|
||||
except StopIteration:
|
||||
return
|
||||
yield item
|
38
venv/Lib/site-packages/pip/_internal/utils/_log.py
Normal file
38
venv/Lib/site-packages/pip/_internal/utils/_log.py
Normal file
@@ -0,0 +1,38 @@
|
||||
"""Customize logging
|
||||
|
||||
Defines custom logger class for the `logger.verbose(...)` method.
|
||||
|
||||
init_logging() must be called before any other modules that call logging.getLogger.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import Any, cast
|
||||
|
||||
# custom log level for `--verbose` output
|
||||
# between DEBUG and INFO
|
||||
VERBOSE = 15
|
||||
|
||||
|
||||
class VerboseLogger(logging.Logger):
|
||||
"""Custom Logger, defining a verbose log-level
|
||||
|
||||
VERBOSE is between INFO and DEBUG.
|
||||
"""
|
||||
|
||||
def verbose(self, msg: str, *args: Any, **kwargs: Any) -> None:
|
||||
return self.log(VERBOSE, msg, *args, **kwargs)
|
||||
|
||||
|
||||
def getLogger(name: str) -> VerboseLogger:
|
||||
"""logging.getLogger, but ensures our VerboseLogger class is returned"""
|
||||
return cast(VerboseLogger, logging.getLogger(name))
|
||||
|
||||
|
||||
def init_logging() -> None:
|
||||
"""Register our VerboseLogger and VERBOSE log level.
|
||||
|
||||
Should be called before any calls to getLogger(),
|
||||
i.e. in pip._internal.__init__
|
||||
"""
|
||||
logging.setLoggerClass(VerboseLogger)
|
||||
logging.addLevelName(VERBOSE, "VERBOSE")
|
52
venv/Lib/site-packages/pip/_internal/utils/appdirs.py
Normal file
52
venv/Lib/site-packages/pip/_internal/utils/appdirs.py
Normal file
@@ -0,0 +1,52 @@
|
||||
"""
|
||||
This code wraps the vendored appdirs module to so the return values are
|
||||
compatible for the current pip code base.
|
||||
|
||||
The intention is to rewrite current usages gradually, keeping the tests pass,
|
||||
and eventually drop this after all usages are changed.
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
from typing import List
|
||||
|
||||
from pip._vendor import platformdirs as _appdirs
|
||||
|
||||
|
||||
def user_cache_dir(appname: str) -> str:
|
||||
return _appdirs.user_cache_dir(appname, appauthor=False)
|
||||
|
||||
|
||||
def _macos_user_config_dir(appname: str, roaming: bool = True) -> str:
|
||||
# Use ~/Application Support/pip, if the directory exists.
|
||||
path = _appdirs.user_data_dir(appname, appauthor=False, roaming=roaming)
|
||||
if os.path.isdir(path):
|
||||
return path
|
||||
|
||||
# Use a Linux-like ~/.config/pip, by default.
|
||||
linux_like_path = "~/.config/"
|
||||
if appname:
|
||||
linux_like_path = os.path.join(linux_like_path, appname)
|
||||
|
||||
return os.path.expanduser(linux_like_path)
|
||||
|
||||
|
||||
def user_config_dir(appname: str, roaming: bool = True) -> str:
|
||||
if sys.platform == "darwin":
|
||||
return _macos_user_config_dir(appname, roaming)
|
||||
|
||||
return _appdirs.user_config_dir(appname, appauthor=False, roaming=roaming)
|
||||
|
||||
|
||||
# for the discussion regarding site_config_dir locations
|
||||
# see <https://github.com/pypa/pip/issues/1733>
|
||||
def site_config_dirs(appname: str) -> List[str]:
|
||||
if sys.platform == "darwin":
|
||||
return [_appdirs.site_data_dir(appname, appauthor=False, multipath=True)]
|
||||
|
||||
dirval = _appdirs.site_config_dir(appname, appauthor=False, multipath=True)
|
||||
if sys.platform == "win32":
|
||||
return [dirval]
|
||||
|
||||
# Unix-y system. Look in /etc as well.
|
||||
return dirval.split(os.pathsep) + ["/etc"]
|
63
venv/Lib/site-packages/pip/_internal/utils/compat.py
Normal file
63
venv/Lib/site-packages/pip/_internal/utils/compat.py
Normal file
@@ -0,0 +1,63 @@
|
||||
"""Stuff that differs in different Python versions and platform
|
||||
distributions."""
|
||||
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
|
||||
__all__ = ["get_path_uid", "stdlib_pkgs", "WINDOWS"]
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def has_tls() -> bool:
|
||||
try:
|
||||
import _ssl # noqa: F401 # ignore unused
|
||||
|
||||
return True
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
from pip._vendor.urllib3.util import IS_PYOPENSSL
|
||||
|
||||
return IS_PYOPENSSL
|
||||
|
||||
|
||||
def get_path_uid(path: str) -> int:
|
||||
"""
|
||||
Return path's uid.
|
||||
|
||||
Does not follow symlinks:
|
||||
https://github.com/pypa/pip/pull/935#discussion_r5307003
|
||||
|
||||
Placed this function in compat due to differences on AIX and
|
||||
Jython, that should eventually go away.
|
||||
|
||||
:raises OSError: When path is a symlink or can't be read.
|
||||
"""
|
||||
if hasattr(os, "O_NOFOLLOW"):
|
||||
fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW)
|
||||
file_uid = os.fstat(fd).st_uid
|
||||
os.close(fd)
|
||||
else: # AIX and Jython
|
||||
# WARNING: time of check vulnerability, but best we can do w/o NOFOLLOW
|
||||
if not os.path.islink(path):
|
||||
# older versions of Jython don't have `os.fstat`
|
||||
file_uid = os.stat(path).st_uid
|
||||
else:
|
||||
# raise OSError for parity with os.O_NOFOLLOW above
|
||||
raise OSError(f"{path} is a symlink; Will not return uid for symlinks")
|
||||
return file_uid
|
||||
|
||||
|
||||
# packages in the stdlib that may have installation metadata, but should not be
|
||||
# considered 'installed'. this theoretically could be determined based on
|
||||
# dist.location (py27:`sysconfig.get_paths()['stdlib']`,
|
||||
# py26:sysconfig.get_config_vars('LIBDEST')), but fear platform variation may
|
||||
# make this ineffective, so hard-coding
|
||||
stdlib_pkgs = {"python", "wsgiref", "argparse"}
|
||||
|
||||
|
||||
# windows detection, covers cpython and ironpython
|
||||
WINDOWS = sys.platform.startswith("win") or (sys.platform == "cli" and os.name == "nt")
|
165
venv/Lib/site-packages/pip/_internal/utils/compatibility_tags.py
Normal file
165
venv/Lib/site-packages/pip/_internal/utils/compatibility_tags.py
Normal file
@@ -0,0 +1,165 @@
|
||||
"""Generate and work with PEP 425 Compatibility Tags.
|
||||
"""
|
||||
|
||||
import re
|
||||
from typing import List, Optional, Tuple
|
||||
|
||||
from pip._vendor.packaging.tags import (
|
||||
PythonVersion,
|
||||
Tag,
|
||||
compatible_tags,
|
||||
cpython_tags,
|
||||
generic_tags,
|
||||
interpreter_name,
|
||||
interpreter_version,
|
||||
mac_platforms,
|
||||
)
|
||||
|
||||
_osx_arch_pat = re.compile(r"(.+)_(\d+)_(\d+)_(.+)")
|
||||
|
||||
|
||||
def version_info_to_nodot(version_info: Tuple[int, ...]) -> str:
|
||||
# Only use up to the first two numbers.
|
||||
return "".join(map(str, version_info[:2]))
|
||||
|
||||
|
||||
def _mac_platforms(arch: str) -> List[str]:
|
||||
match = _osx_arch_pat.match(arch)
|
||||
if match:
|
||||
name, major, minor, actual_arch = match.groups()
|
||||
mac_version = (int(major), int(minor))
|
||||
arches = [
|
||||
# Since we have always only checked that the platform starts
|
||||
# with "macosx", for backwards-compatibility we extract the
|
||||
# actual prefix provided by the user in case they provided
|
||||
# something like "macosxcustom_". It may be good to remove
|
||||
# this as undocumented or deprecate it in the future.
|
||||
"{}_{}".format(name, arch[len("macosx_") :])
|
||||
for arch in mac_platforms(mac_version, actual_arch)
|
||||
]
|
||||
else:
|
||||
# arch pattern didn't match (?!)
|
||||
arches = [arch]
|
||||
return arches
|
||||
|
||||
|
||||
def _custom_manylinux_platforms(arch: str) -> List[str]:
|
||||
arches = [arch]
|
||||
arch_prefix, arch_sep, arch_suffix = arch.partition("_")
|
||||
if arch_prefix == "manylinux2014":
|
||||
# manylinux1/manylinux2010 wheels run on most manylinux2014 systems
|
||||
# with the exception of wheels depending on ncurses. PEP 599 states
|
||||
# manylinux1/manylinux2010 wheels should be considered
|
||||
# manylinux2014 wheels:
|
||||
# https://www.python.org/dev/peps/pep-0599/#backwards-compatibility-with-manylinux2010-wheels
|
||||
if arch_suffix in {"i686", "x86_64"}:
|
||||
arches.append("manylinux2010" + arch_sep + arch_suffix)
|
||||
arches.append("manylinux1" + arch_sep + arch_suffix)
|
||||
elif arch_prefix == "manylinux2010":
|
||||
# manylinux1 wheels run on most manylinux2010 systems with the
|
||||
# exception of wheels depending on ncurses. PEP 571 states
|
||||
# manylinux1 wheels should be considered manylinux2010 wheels:
|
||||
# https://www.python.org/dev/peps/pep-0571/#backwards-compatibility-with-manylinux1-wheels
|
||||
arches.append("manylinux1" + arch_sep + arch_suffix)
|
||||
return arches
|
||||
|
||||
|
||||
def _get_custom_platforms(arch: str) -> List[str]:
|
||||
arch_prefix, arch_sep, arch_suffix = arch.partition("_")
|
||||
if arch.startswith("macosx"):
|
||||
arches = _mac_platforms(arch)
|
||||
elif arch_prefix in ["manylinux2014", "manylinux2010"]:
|
||||
arches = _custom_manylinux_platforms(arch)
|
||||
else:
|
||||
arches = [arch]
|
||||
return arches
|
||||
|
||||
|
||||
def _expand_allowed_platforms(platforms: Optional[List[str]]) -> Optional[List[str]]:
|
||||
if not platforms:
|
||||
return None
|
||||
|
||||
seen = set()
|
||||
result = []
|
||||
|
||||
for p in platforms:
|
||||
if p in seen:
|
||||
continue
|
||||
additions = [c for c in _get_custom_platforms(p) if c not in seen]
|
||||
seen.update(additions)
|
||||
result.extend(additions)
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def _get_python_version(version: str) -> PythonVersion:
|
||||
if len(version) > 1:
|
||||
return int(version[0]), int(version[1:])
|
||||
else:
|
||||
return (int(version[0]),)
|
||||
|
||||
|
||||
def _get_custom_interpreter(
|
||||
implementation: Optional[str] = None, version: Optional[str] = None
|
||||
) -> str:
|
||||
if implementation is None:
|
||||
implementation = interpreter_name()
|
||||
if version is None:
|
||||
version = interpreter_version()
|
||||
return f"{implementation}{version}"
|
||||
|
||||
|
||||
def get_supported(
|
||||
version: Optional[str] = None,
|
||||
platforms: Optional[List[str]] = None,
|
||||
impl: Optional[str] = None,
|
||||
abis: Optional[List[str]] = None,
|
||||
) -> List[Tag]:
|
||||
"""Return a list of supported tags for each version specified in
|
||||
`versions`.
|
||||
|
||||
:param version: a string version, of the form "33" or "32",
|
||||
or None. The version will be assumed to support our ABI.
|
||||
:param platform: specify a list of platforms you want valid
|
||||
tags for, or None. If None, use the local system platform.
|
||||
:param impl: specify the exact implementation you want valid
|
||||
tags for, or None. If None, use the local interpreter impl.
|
||||
:param abis: specify a list of abis you want valid
|
||||
tags for, or None. If None, use the local interpreter abi.
|
||||
"""
|
||||
supported: List[Tag] = []
|
||||
|
||||
python_version: Optional[PythonVersion] = None
|
||||
if version is not None:
|
||||
python_version = _get_python_version(version)
|
||||
|
||||
interpreter = _get_custom_interpreter(impl, version)
|
||||
|
||||
platforms = _expand_allowed_platforms(platforms)
|
||||
|
||||
is_cpython = (impl or interpreter_name()) == "cp"
|
||||
if is_cpython:
|
||||
supported.extend(
|
||||
cpython_tags(
|
||||
python_version=python_version,
|
||||
abis=abis,
|
||||
platforms=platforms,
|
||||
)
|
||||
)
|
||||
else:
|
||||
supported.extend(
|
||||
generic_tags(
|
||||
interpreter=interpreter,
|
||||
abis=abis,
|
||||
platforms=platforms,
|
||||
)
|
||||
)
|
||||
supported.extend(
|
||||
compatible_tags(
|
||||
python_version=python_version,
|
||||
interpreter=interpreter,
|
||||
platforms=platforms,
|
||||
)
|
||||
)
|
||||
|
||||
return supported
|
11
venv/Lib/site-packages/pip/_internal/utils/datetime.py
Normal file
11
venv/Lib/site-packages/pip/_internal/utils/datetime.py
Normal file
@@ -0,0 +1,11 @@
|
||||
"""For when pip wants to check the date or time.
|
||||
"""
|
||||
|
||||
import datetime
|
||||
|
||||
|
||||
def today_is_later_than(year: int, month: int, day: int) -> bool:
|
||||
today = datetime.date.today()
|
||||
given = datetime.date(year, month, day)
|
||||
|
||||
return today > given
|
120
venv/Lib/site-packages/pip/_internal/utils/deprecation.py
Normal file
120
venv/Lib/site-packages/pip/_internal/utils/deprecation.py
Normal file
@@ -0,0 +1,120 @@
|
||||
"""
|
||||
A module that implements tooling to enable easy warnings about deprecations.
|
||||
"""
|
||||
|
||||
import logging
|
||||
import warnings
|
||||
from typing import Any, Optional, TextIO, Type, Union
|
||||
|
||||
from pip._vendor.packaging.version import parse
|
||||
|
||||
from pip import __version__ as current_version # NOTE: tests patch this name.
|
||||
|
||||
DEPRECATION_MSG_PREFIX = "DEPRECATION: "
|
||||
|
||||
|
||||
class PipDeprecationWarning(Warning):
|
||||
pass
|
||||
|
||||
|
||||
_original_showwarning: Any = None
|
||||
|
||||
|
||||
# Warnings <-> Logging Integration
|
||||
def _showwarning(
|
||||
message: Union[Warning, str],
|
||||
category: Type[Warning],
|
||||
filename: str,
|
||||
lineno: int,
|
||||
file: Optional[TextIO] = None,
|
||||
line: Optional[str] = None,
|
||||
) -> None:
|
||||
if file is not None:
|
||||
if _original_showwarning is not None:
|
||||
_original_showwarning(message, category, filename, lineno, file, line)
|
||||
elif issubclass(category, PipDeprecationWarning):
|
||||
# We use a specially named logger which will handle all of the
|
||||
# deprecation messages for pip.
|
||||
logger = logging.getLogger("pip._internal.deprecations")
|
||||
logger.warning(message)
|
||||
else:
|
||||
_original_showwarning(message, category, filename, lineno, file, line)
|
||||
|
||||
|
||||
def install_warning_logger() -> None:
|
||||
# Enable our Deprecation Warnings
|
||||
warnings.simplefilter("default", PipDeprecationWarning, append=True)
|
||||
|
||||
global _original_showwarning
|
||||
|
||||
if _original_showwarning is None:
|
||||
_original_showwarning = warnings.showwarning
|
||||
warnings.showwarning = _showwarning
|
||||
|
||||
|
||||
def deprecated(
|
||||
*,
|
||||
reason: str,
|
||||
replacement: Optional[str],
|
||||
gone_in: Optional[str],
|
||||
feature_flag: Optional[str] = None,
|
||||
issue: Optional[int] = None,
|
||||
) -> None:
|
||||
"""Helper to deprecate existing functionality.
|
||||
|
||||
reason:
|
||||
Textual reason shown to the user about why this functionality has
|
||||
been deprecated. Should be a complete sentence.
|
||||
replacement:
|
||||
Textual suggestion shown to the user about what alternative
|
||||
functionality they can use.
|
||||
gone_in:
|
||||
The version of pip does this functionality should get removed in.
|
||||
Raises an error if pip's current version is greater than or equal to
|
||||
this.
|
||||
feature_flag:
|
||||
Command-line flag of the form --use-feature={feature_flag} for testing
|
||||
upcoming functionality.
|
||||
issue:
|
||||
Issue number on the tracker that would serve as a useful place for
|
||||
users to find related discussion and provide feedback.
|
||||
"""
|
||||
|
||||
# Determine whether or not the feature is already gone in this version.
|
||||
is_gone = gone_in is not None and parse(current_version) >= parse(gone_in)
|
||||
|
||||
message_parts = [
|
||||
(reason, f"{DEPRECATION_MSG_PREFIX}{{}}"),
|
||||
(
|
||||
gone_in,
|
||||
"pip {} will enforce this behaviour change."
|
||||
if not is_gone
|
||||
else "Since pip {}, this is no longer supported.",
|
||||
),
|
||||
(
|
||||
replacement,
|
||||
"A possible replacement is {}.",
|
||||
),
|
||||
(
|
||||
feature_flag,
|
||||
"You can use the flag --use-feature={} to test the upcoming behaviour."
|
||||
if not is_gone
|
||||
else None,
|
||||
),
|
||||
(
|
||||
issue,
|
||||
"Discussion can be found at https://github.com/pypa/pip/issues/{}",
|
||||
),
|
||||
]
|
||||
|
||||
message = " ".join(
|
||||
format_str.format(value)
|
||||
for value, format_str in message_parts
|
||||
if format_str is not None and value is not None
|
||||
)
|
||||
|
||||
# Raise as an error if this behaviour is deprecated.
|
||||
if is_gone:
|
||||
raise PipDeprecationWarning(message)
|
||||
|
||||
warnings.warn(message, category=PipDeprecationWarning, stacklevel=2)
|
@@ -0,0 +1,87 @@
|
||||
from typing import Optional
|
||||
|
||||
from pip._internal.models.direct_url import ArchiveInfo, DirectUrl, DirInfo, VcsInfo
|
||||
from pip._internal.models.link import Link
|
||||
from pip._internal.utils.urls import path_to_url
|
||||
from pip._internal.vcs import vcs
|
||||
|
||||
|
||||
def direct_url_as_pep440_direct_reference(direct_url: DirectUrl, name: str) -> str:
|
||||
"""Convert a DirectUrl to a pip requirement string."""
|
||||
direct_url.validate() # if invalid, this is a pip bug
|
||||
requirement = name + " @ "
|
||||
fragments = []
|
||||
if isinstance(direct_url.info, VcsInfo):
|
||||
requirement += "{}+{}@{}".format(
|
||||
direct_url.info.vcs, direct_url.url, direct_url.info.commit_id
|
||||
)
|
||||
elif isinstance(direct_url.info, ArchiveInfo):
|
||||
requirement += direct_url.url
|
||||
if direct_url.info.hash:
|
||||
fragments.append(direct_url.info.hash)
|
||||
else:
|
||||
assert isinstance(direct_url.info, DirInfo)
|
||||
requirement += direct_url.url
|
||||
if direct_url.subdirectory:
|
||||
fragments.append("subdirectory=" + direct_url.subdirectory)
|
||||
if fragments:
|
||||
requirement += "#" + "&".join(fragments)
|
||||
return requirement
|
||||
|
||||
|
||||
def direct_url_for_editable(source_dir: str) -> DirectUrl:
|
||||
return DirectUrl(
|
||||
url=path_to_url(source_dir),
|
||||
info=DirInfo(editable=True),
|
||||
)
|
||||
|
||||
|
||||
def direct_url_from_link(
|
||||
link: Link, source_dir: Optional[str] = None, link_is_in_wheel_cache: bool = False
|
||||
) -> DirectUrl:
|
||||
if link.is_vcs:
|
||||
vcs_backend = vcs.get_backend_for_scheme(link.scheme)
|
||||
assert vcs_backend
|
||||
url, requested_revision, _ = vcs_backend.get_url_rev_and_auth(
|
||||
link.url_without_fragment
|
||||
)
|
||||
# For VCS links, we need to find out and add commit_id.
|
||||
if link_is_in_wheel_cache:
|
||||
# If the requested VCS link corresponds to a cached
|
||||
# wheel, it means the requested revision was an
|
||||
# immutable commit hash, otherwise it would not have
|
||||
# been cached. In that case we don't have a source_dir
|
||||
# with the VCS checkout.
|
||||
assert requested_revision
|
||||
commit_id = requested_revision
|
||||
else:
|
||||
# If the wheel was not in cache, it means we have
|
||||
# had to checkout from VCS to build and we have a source_dir
|
||||
# which we can inspect to find out the commit id.
|
||||
assert source_dir
|
||||
commit_id = vcs_backend.get_revision(source_dir)
|
||||
return DirectUrl(
|
||||
url=url,
|
||||
info=VcsInfo(
|
||||
vcs=vcs_backend.name,
|
||||
commit_id=commit_id,
|
||||
requested_revision=requested_revision,
|
||||
),
|
||||
subdirectory=link.subdirectory_fragment,
|
||||
)
|
||||
elif link.is_existing_dir():
|
||||
return DirectUrl(
|
||||
url=link.url_without_fragment,
|
||||
info=DirInfo(),
|
||||
subdirectory=link.subdirectory_fragment,
|
||||
)
|
||||
else:
|
||||
hash = None
|
||||
hash_name = link.hash_name
|
||||
if hash_name:
|
||||
hash = f"{hash_name}={link.hash}"
|
||||
return DirectUrl(
|
||||
url=link.url_without_fragment,
|
||||
info=ArchiveInfo(hash=hash),
|
||||
subdirectory=link.subdirectory_fragment,
|
||||
)
|
72
venv/Lib/site-packages/pip/_internal/utils/egg_link.py
Normal file
72
venv/Lib/site-packages/pip/_internal/utils/egg_link.py
Normal file
@@ -0,0 +1,72 @@
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from typing import List, Optional
|
||||
|
||||
from pip._internal.locations import site_packages, user_site
|
||||
from pip._internal.utils.virtualenv import (
|
||||
running_under_virtualenv,
|
||||
virtualenv_no_global,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"egg_link_path_from_sys_path",
|
||||
"egg_link_path_from_location",
|
||||
]
|
||||
|
||||
|
||||
def _egg_link_name(raw_name: str) -> str:
|
||||
"""
|
||||
Convert a Name metadata value to a .egg-link name, by applying
|
||||
the same substitution as pkg_resources's safe_name function.
|
||||
Note: we cannot use canonicalize_name because it has a different logic.
|
||||
"""
|
||||
return re.sub("[^A-Za-z0-9.]+", "-", raw_name) + ".egg-link"
|
||||
|
||||
|
||||
def egg_link_path_from_sys_path(raw_name: str) -> Optional[str]:
|
||||
"""
|
||||
Look for a .egg-link file for project name, by walking sys.path.
|
||||
"""
|
||||
egg_link_name = _egg_link_name(raw_name)
|
||||
for path_item in sys.path:
|
||||
egg_link = os.path.join(path_item, egg_link_name)
|
||||
if os.path.isfile(egg_link):
|
||||
return egg_link
|
||||
return None
|
||||
|
||||
|
||||
def egg_link_path_from_location(raw_name: str) -> Optional[str]:
|
||||
"""
|
||||
Return the path for the .egg-link file if it exists, otherwise, None.
|
||||
|
||||
There's 3 scenarios:
|
||||
1) not in a virtualenv
|
||||
try to find in site.USER_SITE, then site_packages
|
||||
2) in a no-global virtualenv
|
||||
try to find in site_packages
|
||||
3) in a yes-global virtualenv
|
||||
try to find in site_packages, then site.USER_SITE
|
||||
(don't look in global location)
|
||||
|
||||
For #1 and #3, there could be odd cases, where there's an egg-link in 2
|
||||
locations.
|
||||
|
||||
This method will just return the first one found.
|
||||
"""
|
||||
sites: List[str] = []
|
||||
if running_under_virtualenv():
|
||||
sites.append(site_packages)
|
||||
if not virtualenv_no_global() and user_site:
|
||||
sites.append(user_site)
|
||||
else:
|
||||
if user_site:
|
||||
sites.append(user_site)
|
||||
sites.append(site_packages)
|
||||
|
||||
egg_link_name = _egg_link_name(raw_name)
|
||||
for site in sites:
|
||||
egglink = os.path.join(site, egg_link_name)
|
||||
if os.path.isfile(egglink):
|
||||
return egglink
|
||||
return None
|
36
venv/Lib/site-packages/pip/_internal/utils/encoding.py
Normal file
36
venv/Lib/site-packages/pip/_internal/utils/encoding.py
Normal file
@@ -0,0 +1,36 @@
|
||||
import codecs
|
||||
import locale
|
||||
import re
|
||||
import sys
|
||||
from typing import List, Tuple
|
||||
|
||||
BOMS: List[Tuple[bytes, str]] = [
|
||||
(codecs.BOM_UTF8, "utf-8"),
|
||||
(codecs.BOM_UTF16, "utf-16"),
|
||||
(codecs.BOM_UTF16_BE, "utf-16-be"),
|
||||
(codecs.BOM_UTF16_LE, "utf-16-le"),
|
||||
(codecs.BOM_UTF32, "utf-32"),
|
||||
(codecs.BOM_UTF32_BE, "utf-32-be"),
|
||||
(codecs.BOM_UTF32_LE, "utf-32-le"),
|
||||
]
|
||||
|
||||
ENCODING_RE = re.compile(rb"coding[:=]\s*([-\w.]+)")
|
||||
|
||||
|
||||
def auto_decode(data: bytes) -> str:
|
||||
"""Check a bytes string for a BOM to correctly detect the encoding
|
||||
|
||||
Fallback to locale.getpreferredencoding(False) like open() on Python3"""
|
||||
for bom, encoding in BOMS:
|
||||
if data.startswith(bom):
|
||||
return data[len(bom) :].decode(encoding)
|
||||
# Lets check the first two lines as in PEP263
|
||||
for line in data.split(b"\n")[:2]:
|
||||
if line[0:1] == b"#" and ENCODING_RE.search(line):
|
||||
result = ENCODING_RE.search(line)
|
||||
assert result is not None
|
||||
encoding = result.groups()[0].decode("ascii")
|
||||
return data.decode(encoding)
|
||||
return data.decode(
|
||||
locale.getpreferredencoding(False) or sys.getdefaultencoding(),
|
||||
)
|
84
venv/Lib/site-packages/pip/_internal/utils/entrypoints.py
Normal file
84
venv/Lib/site-packages/pip/_internal/utils/entrypoints.py
Normal file
@@ -0,0 +1,84 @@
|
||||
import itertools
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
from typing import List, Optional
|
||||
|
||||
from pip._internal.cli.main import main
|
||||
from pip._internal.utils.compat import WINDOWS
|
||||
|
||||
_EXECUTABLE_NAMES = [
|
||||
"pip",
|
||||
f"pip{sys.version_info.major}",
|
||||
f"pip{sys.version_info.major}.{sys.version_info.minor}",
|
||||
]
|
||||
if WINDOWS:
|
||||
_allowed_extensions = {"", ".exe"}
|
||||
_EXECUTABLE_NAMES = [
|
||||
"".join(parts)
|
||||
for parts in itertools.product(_EXECUTABLE_NAMES, _allowed_extensions)
|
||||
]
|
||||
|
||||
|
||||
def _wrapper(args: Optional[List[str]] = None) -> int:
|
||||
"""Central wrapper for all old entrypoints.
|
||||
|
||||
Historically pip has had several entrypoints defined. Because of issues
|
||||
arising from PATH, sys.path, multiple Pythons, their interactions, and most
|
||||
of them having a pip installed, users suffer every time an entrypoint gets
|
||||
moved.
|
||||
|
||||
To alleviate this pain, and provide a mechanism for warning users and
|
||||
directing them to an appropriate place for help, we now define all of
|
||||
our old entrypoints as wrappers for the current one.
|
||||
"""
|
||||
sys.stderr.write(
|
||||
"WARNING: pip is being invoked by an old script wrapper. This will "
|
||||
"fail in a future version of pip.\n"
|
||||
"Please see https://github.com/pypa/pip/issues/5599 for advice on "
|
||||
"fixing the underlying issue.\n"
|
||||
"To avoid this problem you can invoke Python with '-m pip' instead of "
|
||||
"running pip directly.\n"
|
||||
)
|
||||
return main(args)
|
||||
|
||||
|
||||
def get_best_invocation_for_this_pip() -> str:
|
||||
"""Try to figure out the best way to invoke pip in the current environment."""
|
||||
binary_directory = "Scripts" if WINDOWS else "bin"
|
||||
binary_prefix = os.path.join(sys.prefix, binary_directory)
|
||||
|
||||
# Try to use pip[X[.Y]] names, if those executables for this environment are
|
||||
# the first on PATH with that name.
|
||||
path_parts = os.path.normcase(os.environ.get("PATH", "")).split(os.pathsep)
|
||||
exe_are_in_PATH = os.path.normcase(binary_prefix) in path_parts
|
||||
if exe_are_in_PATH:
|
||||
for exe_name in _EXECUTABLE_NAMES:
|
||||
found_executable = shutil.which(exe_name)
|
||||
binary_executable = os.path.join(binary_prefix, exe_name)
|
||||
if (
|
||||
found_executable
|
||||
and os.path.exists(binary_executable)
|
||||
and os.path.samefile(
|
||||
found_executable,
|
||||
binary_executable,
|
||||
)
|
||||
):
|
||||
return exe_name
|
||||
|
||||
# Use the `-m` invocation, if there's no "nice" invocation.
|
||||
return f"{get_best_invocation_for_this_python()} -m pip"
|
||||
|
||||
|
||||
def get_best_invocation_for_this_python() -> str:
|
||||
"""Try to figure out the best way to invoke the current Python."""
|
||||
exe = sys.executable
|
||||
exe_name = os.path.basename(exe)
|
||||
|
||||
# Try to use the basename, if it's the first executable.
|
||||
found_executable = shutil.which(exe_name)
|
||||
if found_executable and os.path.samefile(found_executable, exe):
|
||||
return exe_name
|
||||
|
||||
# Use the full executable name, because we couldn't find something simpler.
|
||||
return exe
|
153
venv/Lib/site-packages/pip/_internal/utils/filesystem.py
Normal file
153
venv/Lib/site-packages/pip/_internal/utils/filesystem.py
Normal file
@@ -0,0 +1,153 @@
|
||||
import fnmatch
|
||||
import os
|
||||
import os.path
|
||||
import random
|
||||
import sys
|
||||
from contextlib import contextmanager
|
||||
from tempfile import NamedTemporaryFile
|
||||
from typing import Any, BinaryIO, Generator, List, Union, cast
|
||||
|
||||
from pip._vendor.tenacity import retry, stop_after_delay, wait_fixed
|
||||
|
||||
from pip._internal.utils.compat import get_path_uid
|
||||
from pip._internal.utils.misc import format_size
|
||||
|
||||
|
||||
def check_path_owner(path: str) -> bool:
    """Return True if the current user can be considered the owner of *path*.

    Walks up from *path* to the nearest existing ancestor and checks
    writability — or, when running as root, actual uid-0 ownership (to cope
    with ``sudo`` used without ``-H``).
    """
    # Without an effective-uid concept (e.g. Windows) just assume ownership.
    if sys.platform == "win32" or not hasattr(os, "geteuid"):
        return True

    assert os.path.isabs(path)

    prev = None
    current = path
    # Climb towards the filesystem root until something exists on disk.
    while current != prev:
        if not os.path.lexists(current):
            prev, current = current, os.path.dirname(current)
            continue
        if os.geteuid() != 0:
            return os.access(current, os.W_OK)
        # Running as root: require that the path itself is owned by uid 0
        # rather than trusting os.access(), which always succeeds for root.
        try:
            owner_uid = get_path_uid(current)
        except OSError:
            return False
        return owner_uid == 0
    return False  # assume we don't own the path
|
||||
|
||||
|
||||
@contextmanager
def adjacent_tmp_file(path: str, **kwargs: Any) -> Generator[BinaryIO, None, None]:
    """Yield a securely created temporary file living next to *path*.

    The file is NOT deleted on close, and its contents are flushed and
    fsync'ed to disk when the context exits, so it is safe to rename it over
    *path* afterwards.  Extra *kwargs* are forwarded to
    ``tempfile.NamedTemporaryFile`` to control how the file is opened.
    """
    directory, base = os.path.dirname(path), os.path.basename(path)
    with NamedTemporaryFile(
        delete=False,
        dir=directory,
        prefix=base,
        suffix=".tmp",
        **kwargs,
    ) as handle:
        tmp_file = cast(BinaryIO, handle)
        try:
            yield tmp_file
        finally:
            # Make sure the bytes actually hit the disk, not just the OS cache.
            tmp_file.flush()
            os.fsync(tmp_file.fileno())
|
||||
|
||||
|
||||
# Tenacity raises RetryError by default; reraise=True propagates the original
# exception instead.  Retries os.replace for up to 1 second (every 0.25s),
# mainly to ride out transient file locks on Windows.
_replace_retry = retry(reraise=True, stop=stop_after_delay(1), wait=wait_fixed(0.25))

# Drop-in replacement for os.replace with the retry behaviour above.
replace = _replace_retry(os.replace)
|
||||
|
||||
|
||||
# test_writable_dir and _test_writable_dir_win are copied from Flit,
|
||||
# with the author's agreement to also place them under pip's license.
|
||||
def test_writable_dir(path: str) -> bool:
|
||||
"""Check if a directory is writable.
|
||||
|
||||
Uses os.access() on POSIX, tries creating files on Windows.
|
||||
"""
|
||||
# If the directory doesn't exist, find the closest parent that does.
|
||||
while not os.path.isdir(path):
|
||||
parent = os.path.dirname(path)
|
||||
if parent == path:
|
||||
break # Should never get here, but infinite loops are bad
|
||||
path = parent
|
||||
|
||||
if os.name == "posix":
|
||||
return os.access(path, os.W_OK)
|
||||
|
||||
return _test_writable_dir_win(path)
|
||||
|
||||
|
||||
def _test_writable_dir_win(path: str) -> bool:
|
||||
# os.access doesn't work on Windows: http://bugs.python.org/issue2528
|
||||
# and we can't use tempfile: http://bugs.python.org/issue22107
|
||||
basename = "accesstest_deleteme_fishfingers_custard_"
|
||||
alphabet = "abcdefghijklmnopqrstuvwxyz0123456789"
|
||||
for _ in range(10):
|
||||
name = basename + "".join(random.choice(alphabet) for _ in range(6))
|
||||
file = os.path.join(path, name)
|
||||
try:
|
||||
fd = os.open(file, os.O_RDWR | os.O_CREAT | os.O_EXCL)
|
||||
except FileExistsError:
|
||||
pass
|
||||
except PermissionError:
|
||||
# This could be because there's a directory with the same name.
|
||||
# But it's highly unlikely there's a directory called that,
|
||||
# so we'll assume it's because the parent dir is not writable.
|
||||
# This could as well be because the parent dir is not readable,
|
||||
# due to non-privileged user access.
|
||||
return False
|
||||
else:
|
||||
os.close(fd)
|
||||
os.unlink(file)
|
||||
return True
|
||||
|
||||
# This should never be reached
|
||||
raise OSError("Unexpected condition testing for writable directory")
|
||||
|
||||
|
||||
def find_files(path: str, pattern: str) -> List[str]:
    """Recursively collect paths of files beneath *path* whose basenames
    match the UNIX-style shell glob *pattern*."""
    found: List[str] = []
    for dirpath, _dirnames, filenames in os.walk(path):
        for name in fnmatch.filter(filenames, pattern):
            found.append(os.path.join(dirpath, name))
    return found
|
||||
|
||||
|
||||
def file_size(path: str) -> Union[int, float]:
    """Size of the file at *path* in bytes; symlinks are counted as 0."""
    return 0 if os.path.islink(path) else os.path.getsize(path)
|
||||
|
||||
|
||||
def format_file_size(path: str) -> str:
    # Human-readable rendering of file_size(path), via pip's format_size
    # helper (e.g. "1.2 MB").
    return format_size(file_size(path))
|
||||
|
||||
|
||||
def directory_size(path: str) -> Union[int, float]:
    """Total size in bytes of all files beneath *path*.

    Symlinks contribute 0 (see file_size), so cycles cannot inflate totals.
    """
    total = 0.0
    for dirpath, _subdirs, filenames in os.walk(path):
        total += sum(file_size(os.path.join(dirpath, name)) for name in filenames)
    return total
|
||||
|
||||
|
||||
def format_directory_size(path: str) -> str:
    # Human-readable rendering of directory_size(path) (e.g. "1.2 MB").
    return format_size(directory_size(path))
|
27
venv/Lib/site-packages/pip/_internal/utils/filetypes.py
Normal file
27
venv/Lib/site-packages/pip/_internal/utils/filetypes.py
Normal file
@@ -0,0 +1,27 @@
|
||||
"""Filetype information.
|
||||
"""
|
||||
|
||||
from typing import Tuple
|
||||
|
||||
from pip._internal.utils.misc import splitext
|
||||
|
||||
# Canonical extension for built wheel files.
WHEEL_EXTENSION = ".whl"
# bzip2-compressed tarballs.
BZ2_EXTENSIONS: Tuple[str, ...] = (".tar.bz2", ".tbz")
# xz / lzma-compressed tarballs.
XZ_EXTENSIONS: Tuple[str, ...] = (
    ".tar.xz",
    ".txz",
    ".tlz",
    ".tar.lz",
    ".tar.lzma",
)
# Plain zip archives (wheels are zip files too).
ZIP_EXTENSIONS: Tuple[str, ...] = (".zip", WHEEL_EXTENSION)
# Gzip-compressed and uncompressed tarballs.
TAR_EXTENSIONS: Tuple[str, ...] = (".tar.gz", ".tgz", ".tar")
# Every extension pip treats as an installable archive.
ARCHIVE_EXTENSIONS = ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS
|
||||
|
||||
|
||||
def is_archive_file(name: str) -> bool:
    """Return True if `name` has a recognized archive extension."""
    # splitext (pip's variant) treats e.g. ".tar.gz" as a single extension.
    ext = splitext(name)[1].lower()
    # Idiomatic: membership test result is already the boolean we want.
    return ext in ARCHIVE_EXTENSIONS
|
88
venv/Lib/site-packages/pip/_internal/utils/glibc.py
Normal file
88
venv/Lib/site-packages/pip/_internal/utils/glibc.py
Normal file
@@ -0,0 +1,88 @@
|
||||
import os
|
||||
import sys
|
||||
from typing import Optional, Tuple
|
||||
|
||||
|
||||
def glibc_version_string() -> Optional[str]:
    """Return the glibc version string, or None when not running on glibc."""
    # Prefer the cheap confstr-based probe; fall back to ctypes.
    version = glibc_version_string_confstr()
    if version is not None:
        return version
    return glibc_version_string_ctypes()
|
||||
|
||||
|
||||
def glibc_version_string_confstr() -> Optional[str]:
|
||||
"Primary implementation of glibc_version_string using os.confstr."
|
||||
# os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
|
||||
# to be broken or missing. This strategy is used in the standard library
|
||||
# platform module:
|
||||
# https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183
|
||||
if sys.platform == "win32":
|
||||
return None
|
||||
try:
|
||||
gnu_libc_version = os.confstr("CS_GNU_LIBC_VERSION")
|
||||
if gnu_libc_version is None:
|
||||
return None
|
||||
# os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17":
|
||||
_, version = gnu_libc_version.split()
|
||||
except (AttributeError, OSError, ValueError):
|
||||
# os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
|
||||
return None
|
||||
return version
|
||||
|
||||
|
||||
def glibc_version_string_ctypes() -> Optional[str]:
|
||||
"Fallback implementation of glibc_version_string using ctypes."
|
||||
|
||||
try:
|
||||
import ctypes
|
||||
except ImportError:
|
||||
return None
|
||||
|
||||
# ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
|
||||
# manpage says, "If filename is NULL, then the returned handle is for the
|
||||
# main program". This way we can let the linker do the work to figure out
|
||||
# which libc our process is actually using.
|
||||
process_namespace = ctypes.CDLL(None)
|
||||
try:
|
||||
gnu_get_libc_version = process_namespace.gnu_get_libc_version
|
||||
except AttributeError:
|
||||
# Symbol doesn't exist -> therefore, we are not linked to
|
||||
# glibc.
|
||||
return None
|
||||
|
||||
# Call gnu_get_libc_version, which returns a string like "2.5"
|
||||
gnu_get_libc_version.restype = ctypes.c_char_p
|
||||
version_str = gnu_get_libc_version()
|
||||
# py2 / py3 compatibility:
|
||||
if not isinstance(version_str, str):
|
||||
version_str = version_str.decode("ascii")
|
||||
|
||||
return version_str
|
||||
|
||||
|
||||
# platform.libc_ver() regularly reports completely wrong glibc versions
# (it guesses from the executable rather than asking the runtime), e.g.
# reporting 2.7/2.9 on a machine whose `ldd --version` says 2.22.  That made
# the libc data gathered by pip <= 8.1.2 useless, so we use our own probe.
def libc_ver() -> Tuple[str, str]:
    """Determine the glibc version as a ("glibc", version) tuple.

    Returns ("", "") when the version cannot be determined (e.g. on musl or
    non-Linux platforms).
    """
    version = glibc_version_string()
    if version is None:
        return ("", "")
    return ("glibc", version)
|
151
venv/Lib/site-packages/pip/_internal/utils/hashes.py
Normal file
151
venv/Lib/site-packages/pip/_internal/utils/hashes.py
Normal file
@@ -0,0 +1,151 @@
|
||||
import hashlib
|
||||
from typing import TYPE_CHECKING, BinaryIO, Dict, Iterable, List, Optional
|
||||
|
||||
from pip._internal.exceptions import HashMismatch, HashMissing, InstallationError
|
||||
from pip._internal.utils.misc import read_chunks
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from hashlib import _Hash
|
||||
|
||||
# NoReturn introduced in 3.6.2; imported only for type checking to maintain
|
||||
# pip compatibility with older patch versions of Python 3.6
|
||||
from typing import NoReturn
|
||||
|
||||
|
||||
# The recommended hash algo of the moment. Change this whenever the state of
|
||||
# the art changes; it won't hurt backward compatibility.
|
||||
FAVORITE_HASH = "sha256"
|
||||
|
||||
|
||||
# Names of hashlib algorithms allowed by the --hash option and ``pip hash``
|
||||
# Currently, those are the ones at least as collision-resistant as sha256.
|
||||
STRONG_HASHES = ["sha256", "sha384", "sha512"]
|
||||
|
||||
|
||||
class Hashes:
    """A wrapper that builds multiple hashes at once and checks them against
    known-good values

    """

    def __init__(self, hashes: Optional[Dict[str, List[str]]] = None) -> None:
        """
        :param hashes: A dict of algorithm names pointing to lists of allowed
            hex digests
        """
        allowed = {}
        if hashes is not None:
            for alg, keys in hashes.items():
                # Make sure values are always sorted (to ease equality checks)
                allowed[alg] = sorted(keys)
        self._allowed = allowed

    def __and__(self, other: "Hashes") -> "Hashes":
        # Intersection of two hash sets (see comments below for the empty
        # special case).
        if not isinstance(other, Hashes):
            return NotImplemented

        # If either of the Hashes object is entirely empty (i.e. no hash
        # specified at all), all hashes from the other object are allowed.
        if not other:
            return self
        if not self:
            return other

        # Otherwise only hashes that present in both objects are allowed.
        new = {}
        for alg, values in other._allowed.items():
            if alg not in self._allowed:
                continue
            new[alg] = [v for v in values if v in self._allowed[alg]]
        return Hashes(new)

    @property
    def digest_count(self) -> int:
        # Total number of known-good digests across all algorithms.
        return sum(len(digests) for digests in self._allowed.values())

    def is_hash_allowed(self, hash_name: str, hex_digest: str) -> bool:
        """Return whether the given hex digest is allowed."""
        return hex_digest in self._allowed.get(hash_name, [])

    def check_against_chunks(self, chunks: Iterable[bytes]) -> None:
        """Check good hashes against ones built from iterable of chunks of
        data.

        Raise HashMismatch if none match.

        """
        # One hashlib object per allowed algorithm, all fed the same data.
        gots = {}
        for hash_name in self._allowed.keys():
            try:
                gots[hash_name] = hashlib.new(hash_name)
            except (ValueError, TypeError):
                raise InstallationError(f"Unknown hash name: {hash_name}")

        for chunk in chunks:
            for hash in gots.values():
                hash.update(chunk)

        # A single matching digest for any algorithm is sufficient.
        for hash_name, got in gots.items():
            if got.hexdigest() in self._allowed[hash_name]:
                return
        self._raise(gots)

    def _raise(self, gots: Dict[str, "_Hash"]) -> "NoReturn":
        # Overridden by MissingHashes to raise HashMissing instead.
        raise HashMismatch(self._allowed, gots)

    def check_against_file(self, file: BinaryIO) -> None:
        """Check good hashes against a file-like object

        Raise HashMismatch if none match.

        """
        return self.check_against_chunks(read_chunks(file))

    def check_against_path(self, path: str) -> None:
        # Convenience wrapper: open the file in binary mode and delegate.
        with open(path, "rb") as file:
            return self.check_against_file(file)

    def has_one_of(self, hashes: Dict[str, str]) -> bool:
        """Return whether any of the given hashes are allowed."""
        for hash_name, hex_digest in hashes.items():
            if self.is_hash_allowed(hash_name, hex_digest):
                return True
        return False

    def __bool__(self) -> bool:
        """Return whether I know any known-good hashes."""
        return bool(self._allowed)

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, Hashes):
            return NotImplemented
        return self._allowed == other._allowed

    def __hash__(self) -> int:
        # Stable because __init__ sorts each algorithm's digest list and the
        # "alg:digest" pairs are sorted before joining.
        return hash(
            ",".join(
                sorted(
                    ":".join((alg, digest))
                    for alg, digest_list in self._allowed.items()
                    for digest in digest_list
                )
            )
        )
|
||||
|
||||
|
||||
class MissingHashes(Hashes):
    """A workalike for Hashes used when we're missing a hash for a requirement

    It computes the actual hash of the requirement and raises a HashMissing
    exception showing it to the user.

    """

    def __init__(self) -> None:
        """Don't offer the ``hashes`` kwarg."""
        # Pass our favorite hash in to generate a "gotten hash". With the
        # empty list, it will never match, so an error will always raise.
        super().__init__(hashes={FAVORITE_HASH: []})

    def _raise(self, gots: Dict[str, "_Hash"]) -> "NoReturn":
        # Surface the digest we actually computed so the user can pin it.
        raise HashMissing(gots[FAVORITE_HASH].hexdigest())
|
@@ -0,0 +1,35 @@
|
||||
"""A helper module that injects SecureTransport, on import.
|
||||
|
||||
The import should be done as early as possible, to ensure all requests and
|
||||
sessions (or whatever) are created after injecting SecureTransport.
|
||||
|
||||
Note that we only do the injection on macOS, when the linked OpenSSL is too
|
||||
old to handle TLSv1.2.
|
||||
"""
|
||||
|
||||
import sys
|
||||
|
||||
|
||||
def inject_securetransport() -> None:
    """Swap urllib3's TLS backend for SecureTransport on old-OpenSSL macOS."""
    # Only relevant on macOS.
    if sys.platform != "darwin":
        return

    try:
        import ssl
    except ImportError:
        return

    # OpenSSL 1.0.1 (0x1000100F) and newer can negotiate TLSv1.2 natively,
    # so no injection is needed.
    if ssl.OPENSSL_VERSION_NUMBER >= 0x1000100F:
        return

    try:
        from pip._vendor.urllib3.contrib import securetransport
    except (ImportError, OSError):
        return

    securetransport.inject_into_urllib3()


inject_securetransport()
|
348
venv/Lib/site-packages/pip/_internal/utils/logging.py
Normal file
348
venv/Lib/site-packages/pip/_internal/utils/logging.py
Normal file
@@ -0,0 +1,348 @@
|
||||
import contextlib
|
||||
import errno
|
||||
import logging
|
||||
import logging.handlers
|
||||
import os
|
||||
import sys
|
||||
import threading
|
||||
from dataclasses import dataclass
|
||||
from io import TextIOWrapper
|
||||
from logging import Filter
|
||||
from typing import Any, ClassVar, Generator, List, Optional, TextIO, Type
|
||||
|
||||
from pip._vendor.rich.console import (
|
||||
Console,
|
||||
ConsoleOptions,
|
||||
ConsoleRenderable,
|
||||
RenderableType,
|
||||
RenderResult,
|
||||
RichCast,
|
||||
)
|
||||
from pip._vendor.rich.highlighter import NullHighlighter
|
||||
from pip._vendor.rich.logging import RichHandler
|
||||
from pip._vendor.rich.segment import Segment
|
||||
from pip._vendor.rich.style import Style
|
||||
|
||||
from pip._internal.utils._log import VERBOSE, getLogger
|
||||
from pip._internal.utils.compat import WINDOWS
|
||||
from pip._internal.utils.deprecation import DEPRECATION_MSG_PREFIX
|
||||
from pip._internal.utils.misc import ensure_dir
|
||||
|
||||
# Per-thread log-indentation state; see indent_log() / get_indentation().
_log_state = threading.local()
# Dedicated logger for output captured from subprocesses that pip runs.
subprocess_logger = getLogger("pip.subprocessor")
|
||||
|
||||
|
||||
class BrokenStdoutLoggingError(Exception):
    """
    Raised if BrokenPipeError occurs for the stdout stream while logging.

    Raised by RichPipStreamHandler.handleError so that main() can handle the
    condition instead of logging the broken-pipe error and continuing.
    """
|
||||
|
||||
|
||||
def _is_broken_pipe_error(exc_class: Type[BaseException], exc: BaseException) -> bool:
|
||||
if exc_class is BrokenPipeError:
|
||||
return True
|
||||
|
||||
# On Windows, a broken pipe can show up as EINVAL rather than EPIPE:
|
||||
# https://bugs.python.org/issue19612
|
||||
# https://bugs.python.org/issue30418
|
||||
if not WINDOWS:
|
||||
return False
|
||||
|
||||
return isinstance(exc, OSError) and exc.errno in (errno.EINVAL, errno.EPIPE)
|
||||
|
||||
|
||||
@contextlib.contextmanager
def indent_log(num: int = 2) -> Generator[None, None, None]:
    """Context manager: indent all log output emitted inside it by *num*.

    The indentation level is tracked per-thread via ``_log_state``, so
    concurrent threads do not disturb each other's indentation.
    """
    _log_state.indentation = get_indentation() + num
    try:
        yield
    finally:
        _log_state.indentation -= num
|
||||
|
||||
|
||||
def get_indentation() -> int:
    """Current log-output indentation for this thread (0 if never set)."""
    try:
        return _log_state.indentation
    except AttributeError:
        return 0
|
||||
|
||||
|
||||
class IndentingFormatter(logging.Formatter):
    """Formatter that honours indent_log() and prefixes WARNING/ERROR output."""

    # Timestamp format used when add_timestamp is enabled.
    default_time_format = "%Y-%m-%dT%H:%M:%S"

    def __init__(
        self,
        *args: Any,
        add_timestamp: bool = False,
        **kwargs: Any,
    ) -> None:
        """
        A logging.Formatter that obeys the indent_log() context manager.

        :param add_timestamp: A bool indicating output lines should be prefixed
            with their record's timestamp.
        """
        self.add_timestamp = add_timestamp
        super().__init__(*args, **kwargs)

    def get_message_start(self, formatted: str, levelno: int) -> str:
        """
        Return the start of the formatted log message (not counting the
        prefix to add to each line).
        """
        if levelno < logging.WARNING:
            return ""
        if formatted.startswith(DEPRECATION_MSG_PREFIX):
            # Then the message already has a prefix. We don't want it to
            # look like "WARNING: DEPRECATION: ...."
            return ""
        if levelno < logging.ERROR:
            return "WARNING: "

        return "ERROR: "

    def format(self, record: logging.LogRecord) -> str:
        """
        Calls the standard formatter, but will indent all of the log message
        lines by our current indentation level.
        """
        formatted = super().format(record)
        message_start = self.get_message_start(formatted, record.levelno)
        formatted = message_start + formatted

        prefix = ""
        if self.add_timestamp:
            prefix = f"{self.formatTime(record)} "
        prefix += " " * get_indentation()
        # Apply the prefix to every line, preserving existing line breaks.
        formatted = "".join([prefix + line for line in formatted.splitlines(True)])
        return formatted
|
||||
|
||||
|
||||
@dataclass
class IndentedRenderable:
    """Wraps a rich renderable so every rendered line is indented."""

    renderable: RenderableType
    indent: int

    def __rich_console__(
        self, console: Console, options: ConsoleOptions
    ) -> RenderResult:
        segments = console.render(self.renderable, options)
        lines = Segment.split_lines(segments)
        for line in lines:
            # Emit the indent, then the line's own segments, then a newline.
            yield Segment(" " * self.indent)
            yield from line
            yield Segment("\n")
|
||||
|
||||
|
||||
class RichPipStreamHandler(RichHandler):
    """RichHandler tuned for pip: no timestamps/levels/paths, no highlighting."""

    # Empty list disables rich's keyword highlighting.
    KEYWORDS: ClassVar[Optional[List[str]]] = []

    def __init__(self, stream: Optional[TextIO], no_color: bool) -> None:
        super().__init__(
            console=Console(file=stream, no_color=no_color, soft_wrap=True),
            show_time=False,
            show_level=False,
            show_path=False,
            highlighter=NullHighlighter(),
        )

    # Our custom override on Rich's logger, to make things work as we need them to.
    def emit(self, record: logging.LogRecord) -> None:
        style: Optional[Style] = None

        # If we are given a diagnostic error to present, present it with indentation.
        assert isinstance(record.args, tuple)
        if record.msg == "[present-rich] %s" and len(record.args) == 1:
            rich_renderable = record.args[0]
            assert isinstance(
                rich_renderable, (ConsoleRenderable, RichCast, str)
            ), f"{rich_renderable} is not rich-console-renderable"

            renderable: RenderableType = IndentedRenderable(
                rich_renderable, indent=get_indentation()
            )
        else:
            message = self.format(record)
            renderable = self.render_message(record, message)
            # Colour by severity: red for errors, yellow for warnings.
            if record.levelno is not None:
                if record.levelno >= logging.ERROR:
                    style = Style(color="red")
                elif record.levelno >= logging.WARNING:
                    style = Style(color="yellow")

        try:
            self.console.print(renderable, overflow="ignore", crop=False, style=style)
        except Exception:
            self.handleError(record)

    def handleError(self, record: logging.LogRecord) -> None:
        """Called when logging is unable to log some output."""

        exc_class, exc = sys.exc_info()[:2]
        # If a broken pipe occurred while calling write() or flush() on the
        # stdout stream in logging's Handler.emit(), then raise our special
        # exception so we can handle it in main() instead of logging the
        # broken pipe error and continuing.
        if (
            exc_class
            and exc
            and self.console.file is sys.stdout
            and _is_broken_pipe_error(exc_class, exc)
        ):
            raise BrokenStdoutLoggingError()

        return super().handleError(record)
|
||||
|
||||
|
||||
class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler):
    """RotatingFileHandler that creates the log file's directory on demand."""

    def _open(self) -> TextIOWrapper:
        # Ensure the parent directory exists before the base class opens the
        # file; the stock handler would fail if it is missing.
        ensure_dir(os.path.dirname(self.baseFilename))
        return super()._open()
|
||||
|
||||
|
||||
class MaxLevelFilter(Filter):
    """Logging filter that keeps only records strictly below a given level."""

    def __init__(self, level: int) -> None:
        self.level = level

    def filter(self, record: logging.LogRecord) -> bool:
        # Truthy return keeps the record; anything at/above the cutoff drops.
        return record.levelno < self.level
|
||||
|
||||
|
||||
class ExcludeLoggerFilter(Filter):
    """Inverse of logging.Filter: drop records from a logger (or its children)."""

    def filter(self, record: logging.LogRecord) -> bool:
        # The base class *keeps* records belonging to the configured logger
        # subtree; negating it excludes exactly that subtree.
        return not super().filter(record)
|
||||
|
||||
|
||||
def setup_logging(verbosity: int, no_color: bool, user_log_file: Optional[str]) -> int:
    """Configures and sets up all of the logging

    :param verbosity: count of -v (positive) / -q (negative) CLI flags.
    :param no_color: disable ANSI colour in console output.
    :param user_log_file: optional path for a DEBUG-level append log.
    Returns the requested logging level, as its integer value.
    """

    # Determine the level to be logging at.
    if verbosity >= 2:
        level_number = logging.DEBUG
    elif verbosity == 1:
        level_number = VERBOSE
    elif verbosity == -1:
        level_number = logging.WARNING
    elif verbosity == -2:
        level_number = logging.ERROR
    elif verbosity <= -3:
        level_number = logging.CRITICAL
    else:
        level_number = logging.INFO

    level = logging.getLevelName(level_number)

    # The "root" logger should match the "console" level *unless* we also need
    # to log to a user log file.
    include_user_log = user_log_file is not None
    if include_user_log:
        additional_log_file = user_log_file
        root_level = "DEBUG"
    else:
        additional_log_file = "/dev/null"
        root_level = level

    # Disable any logging besides WARNING unless we have DEBUG level logging
    # enabled for vendored libraries.
    vendored_log_level = "WARNING" if level in ["INFO", "ERROR"] else "DEBUG"

    # Shorthands for clarity
    log_streams = {
        "stdout": "ext://sys.stdout",
        "stderr": "ext://sys.stderr",
    }
    handler_classes = {
        "stream": "pip._internal.utils.logging.RichPipStreamHandler",
        "file": "pip._internal.utils.logging.BetterRotatingFileHandler",
    }
    handlers = ["console", "console_errors", "console_subprocess"] + (
        ["user_log"] if include_user_log else []
    )

    # NOTE(review): this uses logging.config, but only "import logging" /
    # "import logging.handlers" are visible at the top of this file — confirm
    # logging.config is imported before this runs.
    logging.config.dictConfig(
        {
            "version": 1,
            "disable_existing_loggers": False,
            "filters": {
                "exclude_warnings": {
                    "()": "pip._internal.utils.logging.MaxLevelFilter",
                    "level": logging.WARNING,
                },
                "restrict_to_subprocess": {
                    "()": "logging.Filter",
                    "name": subprocess_logger.name,
                },
                "exclude_subprocess": {
                    "()": "pip._internal.utils.logging.ExcludeLoggerFilter",
                    "name": subprocess_logger.name,
                },
            },
            "formatters": {
                "indent": {
                    "()": IndentingFormatter,
                    "format": "%(message)s",
                },
                "indent_with_timestamp": {
                    "()": IndentingFormatter,
                    "format": "%(message)s",
                    "add_timestamp": True,
                },
            },
            "handlers": {
                # Normal output (below WARNING) goes to stdout.
                "console": {
                    "level": level,
                    "class": handler_classes["stream"],
                    "no_color": no_color,
                    "stream": log_streams["stdout"],
                    "filters": ["exclude_subprocess", "exclude_warnings"],
                    "formatter": "indent",
                },
                # Warnings and errors go to stderr.
                "console_errors": {
                    "level": "WARNING",
                    "class": handler_classes["stream"],
                    "no_color": no_color,
                    "stream": log_streams["stderr"],
                    "filters": ["exclude_subprocess"],
                    "formatter": "indent",
                },
                # A handler responsible for logging to the console messages
                # from the "subprocessor" logger.
                "console_subprocess": {
                    "level": level,
                    "class": handler_classes["stream"],
                    "stream": log_streams["stderr"],
                    "no_color": no_color,
                    "filters": ["restrict_to_subprocess"],
                    "formatter": "indent",
                },
                "user_log": {
                    "level": "DEBUG",
                    "class": handler_classes["file"],
                    "filename": additional_log_file,
                    "encoding": "utf-8",
                    "delay": True,
                    "formatter": "indent_with_timestamp",
                },
            },
            "root": {
                "level": root_level,
                "handlers": handlers,
            },
            "loggers": {"pip._vendor": {"level": vendored_log_level}},
        }
    )

    return level_number
|
735
venv/Lib/site-packages/pip/_internal/utils/misc.py
Normal file
735
venv/Lib/site-packages/pip/_internal/utils/misc.py
Normal file
File diff suppressed because it is too large
Load Diff
39
venv/Lib/site-packages/pip/_internal/utils/models.py
Normal file
39
venv/Lib/site-packages/pip/_internal/utils/models.py
Normal file
@@ -0,0 +1,39 @@
|
||||
"""Utilities for defining models
|
||||
"""
|
||||
|
||||
import operator
|
||||
from typing import Any, Callable, Type
|
||||
|
||||
|
||||
class KeyBasedCompareMixin:
    """Mixin implementing the full set of rich comparisons via one key.

    Instances compare by ``_compare_key``; comparing against anything that is
    not an instance of ``_defining_class`` yields NotImplemented, letting
    Python fall back to the other operand's reflected operation.
    """

    __slots__ = ["_compare_key", "_defining_class"]

    def __init__(self, key: Any, defining_class: Type["KeyBasedCompareMixin"]) -> None:
        self._compare_key = key
        self._defining_class = defining_class

    def __hash__(self) -> int:
        return hash(self._compare_key)

    def _compare(self, other: Any, method: Callable[[Any, Any], bool]) -> bool:
        # Refuse comparisons across unrelated types.
        if not isinstance(other, self._defining_class):
            return NotImplemented
        return method(self._compare_key, other._compare_key)

    def __lt__(self, other: Any) -> bool:
        return self._compare(other, operator.__lt__)

    def __le__(self, other: Any) -> bool:
        return self._compare(other, operator.__le__)

    def __gt__(self, other: Any) -> bool:
        return self._compare(other, operator.__gt__)

    def __ge__(self, other: Any) -> bool:
        return self._compare(other, operator.__ge__)

    def __eq__(self, other: Any) -> bool:
        return self._compare(other, operator.__eq__)
|
57
venv/Lib/site-packages/pip/_internal/utils/packaging.py
Normal file
57
venv/Lib/site-packages/pip/_internal/utils/packaging.py
Normal file
@@ -0,0 +1,57 @@
|
||||
import functools
|
||||
import logging
|
||||
import re
|
||||
from typing import NewType, Optional, Tuple, cast
|
||||
|
||||
from pip._vendor.packaging import specifiers, version
|
||||
from pip._vendor.packaging.requirements import Requirement
|
||||
|
||||
NormalizedExtra = NewType("NormalizedExtra", str)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def check_requires_python(
    requires_python: Optional[str], version_info: Tuple[int, ...]
) -> bool:
    """
    Check if the given Python version matches a "Requires-Python" specifier.

    :param version_info: A 3-tuple of ints representing a Python
        major-minor-micro version to check (e.g. `sys.version_info[:3]`).

    :return: `True` if the given Python version satisfies the requirement.
        Otherwise, return `False`.

    :raises InvalidSpecifier: If `requires_python` has an invalid format.
    """
    # No "Requires-Python" metadata means no restriction at all.
    if requires_python is None:
        return True

    spec_set = specifiers.SpecifierSet(requires_python)
    candidate = version.parse(".".join(map(str, version_info)))
    return candidate in spec_set
|
||||
|
||||
|
||||
# Bounded cache keeps memory use flat even with unusual workloads.
@functools.lru_cache(maxsize=512)
def get_requirement(req_string: str) -> Requirement:
    """Construct a packaging.Requirement object with caching"""
    # Parsing requirement strings is expensive, and is also expected to happen
    # with a low diversity of different arguments (at least relative the number
    # constructed). This method adds a cache to requirement object creation to
    # minimize repeated parsing of the same string to construct equivalent
    # Requirement objects.
    return Requirement(req_string)
|
||||
|
||||
|
||||
def safe_extra(extra: str) -> NormalizedExtra:
    """Convert an arbitrary string to a standard 'extra' name

    Any runs of characters other than letters, digits, '.' and '-' are
    replaced with a single '_', and the result is always lowercased.

    This function is duplicated from ``pkg_resources``. Note that this is not
    the same to either ``canonicalize_name`` or ``_egg_link_name``.
    """
    return cast(NormalizedExtra, re.sub("[^A-Za-z0-9.-]+", "_", extra).lower())
|
146
venv/Lib/site-packages/pip/_internal/utils/setuptools_build.py
Normal file
146
venv/Lib/site-packages/pip/_internal/utils/setuptools_build.py
Normal file
@@ -0,0 +1,146 @@
|
||||
import sys
|
||||
import textwrap
|
||||
from typing import List, Optional, Sequence
|
||||
|
||||
# Shim to wrap setup.py invocation with setuptools.
# The shim source below is formatted with str.format (outer pass) and then
# %-interpolation (inner pass) before being handed to `python -c`.
# Note that __file__ is handled via two {!r} *and* %r, to ensure that paths on
# Windows are correctly handled (it should be "C:\\Users" not "C:\Users").
_SETUPTOOLS_SHIM = textwrap.dedent(
    """
    exec(compile('''
    # This is <pip-setuptools-caller> -- a caller that pip uses to run setup.py
    #
    # - It imports setuptools before invoking setup.py, to enable projects that directly
    #   import from `distutils.core` to work with newer packaging standards.
    # - It provides a clear error message when setuptools is not installed.
    # - It sets `sys.argv[0]` to the underlying `setup.py`, when invoking `setup.py` so
    #   setuptools doesn't think the script is `-c`. This avoids the following warning:
    #     manifest_maker: standard file '-c' not found".
    # - It generates a shim setup.py, for handling setup.cfg-only projects.
    import os, sys, tokenize

    try:
        import setuptools
    except ImportError as error:
        print(
            "ERROR: Can not execute `setup.py` since setuptools is not available in "
            "the build environment.",
            file=sys.stderr,
        )
        sys.exit(1)

    __file__ = %r
    sys.argv[0] = __file__

    if os.path.exists(__file__):
        filename = __file__
        with tokenize.open(__file__) as f:
            setup_py_code = f.read()
    else:
        filename = "<auto-generated setuptools caller>"
        setup_py_code = "from setuptools import setup; setup()"

    exec(compile(setup_py_code, filename, "exec"))
    ''' % ({!r},), "<pip-setuptools-caller>", "exec"))
    """
).rstrip()
|
||||
|
||||
|
||||
def make_setuptools_shim_args(
    setup_py_path: str,
    global_options: Optional[Sequence[str]] = None,
    no_user_config: bool = False,
    unbuffered_output: bool = False,
) -> List[str]:
    """
    Get setuptools command arguments with shim wrapped setup file invocation.

    :param setup_py_path: The path to setup.py to be wrapped.
    :param global_options: Additional global options.
    :param no_user_config: If True, disables personal user configuration.
    :param unbuffered_output: If True, adds the unbuffered switch to the
        argument list.
    """
    # Start from the interpreter itself; the shim is passed via `-c`.
    args: List[str] = [sys.executable]
    if unbuffered_output:
        args.append("-u")
    args.extend(["-c", _SETUPTOOLS_SHIM.format(setup_py_path)])
    if global_options:
        args.extend(global_options)
    if no_user_config:
        args.append("--no-user-cfg")
    return args
|
||||
|
||||
|
||||
def make_setuptools_bdist_wheel_args(
    setup_py_path: str,
    global_options: Sequence[str],
    build_options: Sequence[str],
    destination_dir: str,
) -> List[str]:
    """Build the command line for ``setup.py bdist_wheel``.

    :param setup_py_path: The path to the setup.py to invoke.
    :param global_options: Options placed before the ``bdist_wheel`` command.
    :param build_options: Options appended after the command.
    :param destination_dir: Directory the built wheel is written to (``-d``).
    """
    # NOTE: Eventually, we'd want to also -S to the flags here, when we're
    # isolating. Currently, it breaks Python in virtualenvs, because it
    # relies on site.py to find parts of the standard library outside the
    # virtualenv.
    base = make_setuptools_shim_args(
        setup_py_path,
        global_options=global_options,
        unbuffered_output=True,
    )
    return base + ["bdist_wheel", "-d", destination_dir] + list(build_options)
|
||||
|
||||
|
||||
def make_setuptools_clean_args(
    setup_py_path: str,
    global_options: Sequence[str],
) -> List[str]:
    """Build the command line for ``setup.py clean --all``.

    :param setup_py_path: The path to the setup.py to invoke.
    :param global_options: Options placed before the ``clean`` command.
    """
    shim_args = make_setuptools_shim_args(
        setup_py_path, global_options=global_options, unbuffered_output=True
    )
    return shim_args + ["clean", "--all"]
|
||||
|
||||
|
||||
def make_setuptools_develop_args(
    setup_py_path: str,
    *,
    global_options: Sequence[str],
    no_user_config: bool,
    prefix: Optional[str],
    home: Optional[str],
    use_user_site: bool,
) -> List[str]:
    """Build the command line for ``setup.py develop --no-deps``.

    :param setup_py_path: The path to the setup.py to invoke.
    :param global_options: Options placed before the ``develop`` command.
    :param no_user_config: If True, disables personal user configuration.
    :param prefix: Installation prefix (``--prefix``), if any.
    :param home: Install directory (``--install-dir``), if any.
    :param use_user_site: If True, install to the per-user site directory.
    """
    # --user and --prefix are mutually exclusive install destinations.
    assert not (use_user_site and prefix)

    args = make_setuptools_shim_args(
        setup_py_path,
        global_options=global_options,
        no_user_config=no_user_config,
    )
    args.extend(["develop", "--no-deps"])

    if prefix:
        args.extend(["--prefix", prefix])
    if home is not None:
        args.extend(["--install-dir", home])
    if use_user_site:
        # An empty --prefix= is required alongside --user so setuptools does
        # not reject the combination.
        args.extend(["--user", "--prefix="])

    return args
|
||||
|
||||
|
||||
def make_setuptools_egg_info_args(
    setup_py_path: str,
    egg_info_dir: Optional[str],
    no_user_config: bool,
) -> List[str]:
    """Build the command line for ``setup.py egg_info``.

    :param setup_py_path: The path to the setup.py to invoke.
    :param egg_info_dir: Directory for the generated metadata (``--egg-base``),
        if any.
    :param no_user_config: If True, disables personal user configuration.
    """
    args = make_setuptools_shim_args(setup_py_path, no_user_config=no_user_config)
    args.append("egg_info")
    if egg_info_dir:
        args.extend(["--egg-base", egg_info_dir])
    return args
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user