first commit

Author: Victor
Date: 2023-11-16 16:57:13 +01:00
Commit: a8fb1fd811
1477 changed files with 275005 additions and 0 deletions


@@ -0,0 +1,124 @@
import contextlib
import hashlib
import logging
import os
from types import TracebackType
from typing import Dict, Generator, Optional, Set, Type, Union
from pip._internal.models.link import Link
from pip._internal.req.req_install import InstallRequirement
from pip._internal.utils.temp_dir import TempDirectory
logger = logging.getLogger(__name__)
@contextlib.contextmanager
def update_env_context_manager(**changes: str) -> Generator[None, None, None]:
target = os.environ
# Save values from the target and change them.
non_existent_marker = object()
saved_values: Dict[str, Union[object, str]] = {}
for name, new_value in changes.items():
try:
saved_values[name] = target[name]
except KeyError:
saved_values[name] = non_existent_marker
target[name] = new_value
try:
yield
finally:
# Restore original values in the target.
for name, original_value in saved_values.items():
if original_value is non_existent_marker:
del target[name]
else:
assert isinstance(original_value, str) # for mypy
target[name] = original_value
@contextlib.contextmanager
def get_build_tracker() -> Generator["BuildTracker", None, None]:
root = os.environ.get("PIP_BUILD_TRACKER")
with contextlib.ExitStack() as ctx:
if root is None:
root = ctx.enter_context(TempDirectory(kind="build-tracker")).path
ctx.enter_context(update_env_context_manager(PIP_BUILD_TRACKER=root))
logger.debug("Initialized build tracking at %s", root)
with BuildTracker(root) as tracker:
yield tracker
class BuildTracker:
def __init__(self, root: str) -> None:
self._root = root
self._entries: Set[InstallRequirement] = set()
logger.debug("Created build tracker: %s", self._root)
def __enter__(self) -> "BuildTracker":
logger.debug("Entered build tracker: %s", self._root)
return self
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> None:
self.cleanup()
def _entry_path(self, link: Link) -> str:
hashed = hashlib.sha224(link.url_without_fragment.encode()).hexdigest()
return os.path.join(self._root, hashed)
def add(self, req: InstallRequirement) -> None:
"""Add an InstallRequirement to build tracking."""
assert req.link
# Get the file to write information about this requirement.
entry_path = self._entry_path(req.link)
# Try reading from the file. If it exists and can be read from, a build
# is already in progress, so a LookupError is raised.
try:
with open(entry_path) as fp:
contents = fp.read()
except FileNotFoundError:
pass
else:
message = "{} is already being built: {}".format(req.link, contents)
raise LookupError(message)
# If we're here, req should really not be building already.
assert req not in self._entries
# Start tracking this requirement.
with open(entry_path, "w", encoding="utf-8") as fp:
fp.write(str(req))
self._entries.add(req)
logger.debug("Added %s to build tracker %r", req, self._root)
def remove(self, req: InstallRequirement) -> None:
"""Remove an InstallRequirement from build tracking."""
assert req.link
# Delete the created file and the corresponding entries.
os.unlink(self._entry_path(req.link))
self._entries.remove(req)
logger.debug("Removed %s from build tracker %r", req, self._root)
def cleanup(self) -> None:
for req in set(self._entries):
self.remove(req)
logger.debug("Removed build tracker: %r", self._root)
@contextlib.contextmanager
def track(self, req: InstallRequirement) -> Generator[None, None, None]:
self.add(req)
yield
self.remove(req)
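
A note on the scheme above: BuildTracker keys its on-disk entries on a SHA-224 hash of each requirement's link URL (fragment stripped), so two concurrent builds of the same link contend for the same marker file. A minimal stdlib-only sketch of that path scheme, with a made-up root directory and URL:

import hashlib
import os

root = "/tmp/pip-build-tracker-example"          # hypothetical tracker root
url = "https://example.com/pkg/demo-1.0.tar.gz"  # hypothetical link, fragment already stripped

# Mirrors BuildTracker._entry_path(): the hex digest names the marker file whose
# presence means "a build of this link is already in progress".
entry_path = os.path.join(root, hashlib.sha224(url.encode()).hexdigest())
print(entry_path)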


@@ -0,0 +1,39 @@
"""Metadata generation logic for source distributions.
"""
import os
from pip._vendor.pyproject_hooks import BuildBackendHookCaller
from pip._internal.build_env import BuildEnvironment
from pip._internal.exceptions import (
InstallationSubprocessError,
MetadataGenerationFailed,
)
from pip._internal.utils.subprocess import runner_with_spinner_message
from pip._internal.utils.temp_dir import TempDirectory
def generate_metadata(
build_env: BuildEnvironment, backend: BuildBackendHookCaller, details: str
) -> str:
"""Generate metadata using mechanisms described in PEP 517.
Returns the generated metadata directory.
"""
metadata_tmpdir = TempDirectory(kind="modern-metadata", globally_managed=True)
metadata_dir = metadata_tmpdir.path
with build_env:
# Note that BuildBackendHookCaller implements a fallback for
# prepare_metadata_for_build_wheel, so we don't have to
# consider the possibility that this hook doesn't exist.
runner = runner_with_spinner_message("Preparing metadata (pyproject.toml)")
with backend.subprocess_runner(runner):
try:
distinfo_dir = backend.prepare_metadata_for_build_wheel(metadata_dir)
except InstallationSubprocessError as error:
raise MetadataGenerationFailed(package_details=details) from error
return os.path.join(metadata_dir, distinfo_dir)
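
Outside pip, the same metadata hook can be exercised with the standalone pyproject_hooks package (pip vendors an equivalent API); this is a sketch only, and the project directory is a placeholder:

import os
import tempfile
from pyproject_hooks import BuildBackendHookCaller

source_dir = "/path/to/unpacked/sdist"  # hypothetical project containing pyproject.toml
backend = BuildBackendHookCaller(source_dir, "setuptools.build_meta")

metadata_dir = tempfile.mkdtemp(prefix="modern-metadata-")
# The hook returns only the name of the generated .dist-info directory;
# generate_metadata() above joins it onto metadata_dir in the same way.
distinfo_dir = backend.prepare_metadata_for_build_wheel(metadata_dir)
print(os.path.join(metadata_dir, distinfo_dir))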


@@ -0,0 +1,41 @@
"""Metadata generation logic for source distributions.
"""
import os
from pip._vendor.pyproject_hooks import BuildBackendHookCaller
from pip._internal.build_env import BuildEnvironment
from pip._internal.exceptions import (
InstallationSubprocessError,
MetadataGenerationFailed,
)
from pip._internal.utils.subprocess import runner_with_spinner_message
from pip._internal.utils.temp_dir import TempDirectory
def generate_editable_metadata(
build_env: BuildEnvironment, backend: BuildBackendHookCaller, details: str
) -> str:
"""Generate metadata using mechanisms described in PEP 660.
Returns the generated metadata directory.
"""
metadata_tmpdir = TempDirectory(kind="modern-metadata", globally_managed=True)
metadata_dir = metadata_tmpdir.path
with build_env:
# Note that BuildBackendHookCaller implements a fallback for
# prepare_metadata_for_build_wheel/editable, so we don't have to
# consider the possibility that this hook doesn't exist.
runner = runner_with_spinner_message(
"Preparing editable metadata (pyproject.toml)"
)
with backend.subprocess_runner(runner):
try:
distinfo_dir = backend.prepare_metadata_for_build_editable(metadata_dir)
except InstallationSubprocessError as error:
raise MetadataGenerationFailed(package_details=details) from error
return os.path.join(metadata_dir, distinfo_dir)
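
The editable variant differs only in which hook is called; with the standalone pyproject_hooks package the equivalent sketch (hypothetical project path) is:

import tempfile
from pyproject_hooks import BuildBackendHookCaller

backend = BuildBackendHookCaller("/path/to/project", "setuptools.build_meta")  # hypothetical path
distinfo_dir = backend.prepare_metadata_for_build_editable(tempfile.mkdtemp(prefix="modern-metadata-"))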


@@ -0,0 +1,74 @@
"""Metadata generation logic for legacy source distributions.
"""
import logging
import os
from pip._internal.build_env import BuildEnvironment
from pip._internal.cli.spinners import open_spinner
from pip._internal.exceptions import (
InstallationError,
InstallationSubprocessError,
MetadataGenerationFailed,
)
from pip._internal.utils.setuptools_build import make_setuptools_egg_info_args
from pip._internal.utils.subprocess import call_subprocess
from pip._internal.utils.temp_dir import TempDirectory
logger = logging.getLogger(__name__)
def _find_egg_info(directory: str) -> str:
"""Find an .egg-info subdirectory in `directory`."""
filenames = [f for f in os.listdir(directory) if f.endswith(".egg-info")]
if not filenames:
raise InstallationError(f"No .egg-info directory found in {directory}")
if len(filenames) > 1:
raise InstallationError(
"More than one .egg-info directory found in {}".format(directory)
)
return os.path.join(directory, filenames[0])
def generate_metadata(
build_env: BuildEnvironment,
setup_py_path: str,
source_dir: str,
isolated: bool,
details: str,
) -> str:
"""Generate metadata using setup.py-based defacto mechanisms.
Returns the generated metadata directory.
"""
logger.debug(
"Running setup.py (path:%s) egg_info for package %s",
setup_py_path,
details,
)
egg_info_dir = TempDirectory(kind="pip-egg-info", globally_managed=True).path
args = make_setuptools_egg_info_args(
setup_py_path,
egg_info_dir=egg_info_dir,
no_user_config=isolated,
)
with build_env:
with open_spinner("Preparing metadata (setup.py)") as spinner:
try:
call_subprocess(
args,
cwd=source_dir,
command_desc="python setup.py egg_info",
spinner=spinner,
)
except InstallationSubprocessError as error:
raise MetadataGenerationFailed(package_details=details) from error
# Return the .egg-info directory.
return _find_egg_info(egg_info_dir)
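
Stripped of pip's isolated build environment and spinner, the legacy path amounts to running setup.py egg_info into a scratch directory and picking up the single *.egg-info directory it creates. A rough stdlib sketch with a hypothetical project path:

import os
import subprocess
import sys
import tempfile

source_dir = "/path/to/legacy/sdist"  # hypothetical project with a setup.py
egg_info_dir = tempfile.mkdtemp(prefix="pip-egg-info-")

subprocess.run(
    [sys.executable, "setup.py", "egg_info", "--egg-base", egg_info_dir],
    cwd=source_dir,
    check=True,
)
candidates = [f for f in os.listdir(egg_info_dir) if f.endswith(".egg-info")]
print(os.path.join(egg_info_dir, candidates[0]) if candidates else "no .egg-info produced")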


@@ -0,0 +1,37 @@
import logging
import os
from typing import Optional
from pip._vendor.pyproject_hooks import BuildBackendHookCaller
from pip._internal.utils.subprocess import runner_with_spinner_message
logger = logging.getLogger(__name__)
def build_wheel_pep517(
name: str,
backend: BuildBackendHookCaller,
metadata_directory: str,
tempd: str,
) -> Optional[str]:
"""Build one InstallRequirement using the PEP 517 build process.
Returns path to wheel if successfully built. Otherwise, returns None.
"""
assert metadata_directory is not None
try:
logger.debug("Destination directory: %s", tempd)
runner = runner_with_spinner_message(
f"Building wheel for {name} (pyproject.toml)"
)
with backend.subprocess_runner(runner):
wheel_name = backend.build_wheel(
tempd,
metadata_directory=metadata_directory,
)
except Exception:
logger.error("Failed building wheel for %s", name)
return None
return os.path.join(tempd, wheel_name)
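
build_wheel() returns only the wheel's filename, which is why the helper above joins it onto the temporary destination directory. A minimal sketch of the same hook via the standalone pyproject_hooks package, with a hypothetical project path:

import os
import tempfile
from pyproject_hooks import BuildBackendHookCaller

backend = BuildBackendHookCaller("/path/to/unpacked/sdist", "setuptools.build_meta")  # hypothetical
tempd = tempfile.mkdtemp(prefix="wheel-build-")
wheel_name = backend.build_wheel(tempd)  # e.g. "demo-1.0-py3-none-any.whl"
print(os.path.join(tempd, wheel_name))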


@@ -0,0 +1,46 @@
import logging
import os
from typing import Optional
from pip._vendor.pyproject_hooks import BuildBackendHookCaller, HookMissing
from pip._internal.utils.subprocess import runner_with_spinner_message
logger = logging.getLogger(__name__)
def build_wheel_editable(
name: str,
backend: BuildBackendHookCaller,
metadata_directory: str,
tempd: str,
) -> Optional[str]:
"""Build one InstallRequirement using the PEP 660 build process.
Returns path to wheel if successfully built. Otherwise, returns None.
"""
assert metadata_directory is not None
try:
logger.debug("Destination directory: %s", tempd)
runner = runner_with_spinner_message(
f"Building editable for {name} (pyproject.toml)"
)
with backend.subprocess_runner(runner):
try:
wheel_name = backend.build_editable(
tempd,
metadata_directory=metadata_directory,
)
except HookMissing as e:
logger.error(
"Cannot build editable %s because the build "
"backend does not have the %s hook",
name,
e,
)
return None
except Exception:
logger.error("Failed building editable for %s", name)
return None
return os.path.join(tempd, wheel_name)
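
PEP 660 hooks are optional for build backends, so the editable build must tolerate the hook being absent; with pyproject_hooks that surfaces as HookMissing, the same exception the except clause above reports. A sketch with a hypothetical project path:

import tempfile
from pyproject_hooks import BuildBackendHookCaller, HookMissing

backend = BuildBackendHookCaller("/path/to/project", "setuptools.build_meta")  # hypothetical
try:
    wheel_name = backend.build_editable(tempfile.mkdtemp(prefix="editable-build-"))
except HookMissing as hook:
    print(f"backend does not provide the {hook} hook")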


@@ -0,0 +1,102 @@
import logging
import os.path
from typing import List, Optional
from pip._internal.cli.spinners import open_spinner
from pip._internal.utils.setuptools_build import make_setuptools_bdist_wheel_args
from pip._internal.utils.subprocess import call_subprocess, format_command_args
logger = logging.getLogger(__name__)
def format_command_result(
command_args: List[str],
command_output: str,
) -> str:
"""Format command information for logging."""
command_desc = format_command_args(command_args)
text = f"Command arguments: {command_desc}\n"
if not command_output:
text += "Command output: None"
elif logger.getEffectiveLevel() > logging.DEBUG:
text += "Command output: [use --verbose to show]"
else:
if not command_output.endswith("\n"):
command_output += "\n"
text += f"Command output:\n{command_output}"
return text
def get_legacy_build_wheel_path(
names: List[str],
temp_dir: str,
name: str,
command_args: List[str],
command_output: str,
) -> Optional[str]:
"""Return the path to the wheel in the temporary build directory."""
# Sort for determinism.
names = sorted(names)
if not names:
msg = ("Legacy build of wheel for {!r} created no files.\n").format(name)
msg += format_command_result(command_args, command_output)
logger.warning(msg)
return None
if len(names) > 1:
msg = (
"Legacy build of wheel for {!r} created more than one file.\n"
"Filenames (choosing first): {}\n"
).format(name, names)
msg += format_command_result(command_args, command_output)
logger.warning(msg)
return os.path.join(temp_dir, names[0])
def build_wheel_legacy(
name: str,
setup_py_path: str,
source_dir: str,
global_options: List[str],
build_options: List[str],
tempd: str,
) -> Optional[str]:
"""Build one unpacked package using the "legacy" build process.
Returns path to wheel if successfully built. Otherwise, returns None.
"""
wheel_args = make_setuptools_bdist_wheel_args(
setup_py_path,
global_options=global_options,
build_options=build_options,
destination_dir=tempd,
)
spin_message = f"Building wheel for {name} (setup.py)"
with open_spinner(spin_message) as spinner:
logger.debug("Destination directory: %s", tempd)
try:
output = call_subprocess(
wheel_args,
command_desc="python setup.py bdist_wheel",
cwd=source_dir,
spinner=spinner,
)
except Exception:
spinner.finish("error")
logger.error("Failed building wheel for %s", name)
return None
names = os.listdir(tempd)
wheel_path = get_legacy_build_wheel_path(
names=names,
temp_dir=tempd,
name=name,
command_args=wheel_args,
command_output=output,
)
return wheel_path
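
Without the spinner and option plumbing, the legacy wheel build is "setup.py bdist_wheel" with the scratch directory as the dist dir, followed by the single-file lookup that get_legacy_build_wheel_path() performs. A rough sketch with a hypothetical project path:

import os
import subprocess
import sys
import tempfile

source_dir = "/path/to/legacy/sdist"  # hypothetical project with a setup.py
tempd = tempfile.mkdtemp(prefix="wheel-build-")

subprocess.run(
    [sys.executable, "setup.py", "bdist_wheel", "-d", tempd],
    cwd=source_dir,
    check=True,
)
names = sorted(os.listdir(tempd))
print(os.path.join(tempd, names[0]) if names else "no wheel produced")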


@@ -0,0 +1,187 @@
"""Validation of dependencies of packages
"""
import logging
from typing import Callable, Dict, List, NamedTuple, Optional, Set, Tuple
from pip._vendor.packaging.requirements import Requirement
from pip._vendor.packaging.specifiers import LegacySpecifier
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
from pip._vendor.packaging.version import LegacyVersion
from pip._internal.distributions import make_distribution_for_install_requirement
from pip._internal.metadata import get_default_environment
from pip._internal.metadata.base import DistributionVersion
from pip._internal.req.req_install import InstallRequirement
from pip._internal.utils.deprecation import deprecated
logger = logging.getLogger(__name__)
class PackageDetails(NamedTuple):
version: DistributionVersion
dependencies: List[Requirement]
# Shorthands
PackageSet = Dict[NormalizedName, PackageDetails]
Missing = Tuple[NormalizedName, Requirement]
Conflicting = Tuple[NormalizedName, DistributionVersion, Requirement]
MissingDict = Dict[NormalizedName, List[Missing]]
ConflictingDict = Dict[NormalizedName, List[Conflicting]]
CheckResult = Tuple[MissingDict, ConflictingDict]
ConflictDetails = Tuple[PackageSet, CheckResult]
def create_package_set_from_installed() -> Tuple[PackageSet, bool]:
"""Converts a list of distributions into a PackageSet."""
package_set = {}
problems = False
env = get_default_environment()
for dist in env.iter_installed_distributions(local_only=False, skip=()):
name = dist.canonical_name
try:
dependencies = list(dist.iter_dependencies())
package_set[name] = PackageDetails(dist.version, dependencies)
except (OSError, ValueError) as e:
# Don't crash on unreadable or broken metadata.
logger.warning("Error parsing requirements for %s: %s", name, e)
problems = True
return package_set, problems
def check_package_set(
package_set: PackageSet, should_ignore: Optional[Callable[[str], bool]] = None
) -> CheckResult:
"""Check if a package set is consistent
If should_ignore is passed, it should be a callable that takes a
package name and returns a boolean.
"""
warn_legacy_versions_and_specifiers(package_set)
missing = {}
conflicting = {}
for package_name, package_detail in package_set.items():
# Info about dependencies of package_name
missing_deps: Set[Missing] = set()
conflicting_deps: Set[Conflicting] = set()
if should_ignore and should_ignore(package_name):
continue
for req in package_detail.dependencies:
name = canonicalize_name(req.name)
# Check if it's missing
if name not in package_set:
missed = True
if req.marker is not None:
missed = req.marker.evaluate({"extra": ""})
if missed:
missing_deps.add((name, req))
continue
# Check if there's a conflict
version = package_set[name].version
if not req.specifier.contains(version, prereleases=True):
conflicting_deps.add((name, version, req))
if missing_deps:
missing[package_name] = sorted(missing_deps, key=str)
if conflicting_deps:
conflicting[package_name] = sorted(conflicting_deps, key=str)
return missing, conflicting
def check_install_conflicts(to_install: List[InstallRequirement]) -> ConflictDetails:
"""For checking if the dependency graph would be consistent after \
installing given requirements
"""
# Start from the current state
package_set, _ = create_package_set_from_installed()
# Install packages
would_be_installed = _simulate_installation_of(to_install, package_set)
# Only warn about directly-dependent packages; create a whitelist of them
whitelist = _create_whitelist(would_be_installed, package_set)
return (
package_set,
check_package_set(
package_set, should_ignore=lambda name: name not in whitelist
),
)
def _simulate_installation_of(
to_install: List[InstallRequirement], package_set: PackageSet
) -> Set[NormalizedName]:
"""Computes the version of packages after installing to_install."""
# Keep track of packages that were installed
installed = set()
# Modify it as installing requirement_set would (assuming no errors)
for inst_req in to_install:
abstract_dist = make_distribution_for_install_requirement(inst_req)
dist = abstract_dist.get_metadata_distribution()
name = dist.canonical_name
package_set[name] = PackageDetails(dist.version, list(dist.iter_dependencies()))
installed.add(name)
return installed
def _create_whitelist(
would_be_installed: Set[NormalizedName], package_set: PackageSet
) -> Set[NormalizedName]:
packages_affected = set(would_be_installed)
for package_name in package_set:
if package_name in packages_affected:
continue
for req in package_set[package_name].dependencies:
if canonicalize_name(req.name) in packages_affected:
packages_affected.add(package_name)
break
return packages_affected
def warn_legacy_versions_and_specifiers(package_set: PackageSet) -> None:
for project_name, package_details in package_set.items():
if isinstance(package_details.version, LegacyVersion):
deprecated(
reason=(
f"{project_name} {package_details.version} "
f"has a non-standard version number."
),
replacement=(
f"to upgrade to a newer version of {project_name} "
f"or contact the author to suggest that they "
f"release a version with a conforming version number"
),
issue=12063,
gone_in="23.3",
)
for dep in package_details.dependencies:
if any(isinstance(spec, LegacySpecifier) for spec in dep.specifier):
deprecated(
reason=(
f"{project_name} {package_details.version} "
f"has a non-standard dependency specifier {dep}."
),
replacement=(
f"to upgrade to a newer version of {project_name} "
f"or contact the author to suggest that they "
f"release a version with a conforming dependency specifiers"
),
issue=12063,
gone_in="23.3",
)
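
The heart of the consistency check is packaging's requirement and specifier machinery: a dependency is missing when its canonical name is absent from the package set, and conflicting when the installed version falls outside the dependent's specifier (pre-releases included). A small sketch using the public packaging distribution and made-up names:

from packaging.requirements import Requirement
from packaging.utils import canonicalize_name
from packaging.version import Version

req = Requirement("SomeDep>=2,<3")                          # hypothetical dependency of an installed package
installed = {canonicalize_name("SomeDep"): Version("1.4")}  # hypothetical installed versions

name = canonicalize_name(req.name)
if name not in installed:
    print(f"missing: {req}")
elif not req.specifier.contains(installed[name], prereleases=True):
    print(f"conflict: {name} {installed[name]} does not satisfy {req}")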


@@ -0,0 +1,255 @@
import collections
import logging
import os
from typing import Container, Dict, Generator, Iterable, List, NamedTuple, Optional, Set
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.packaging.version import Version
from pip._internal.exceptions import BadCommand, InstallationError
from pip._internal.metadata import BaseDistribution, get_environment
from pip._internal.req.constructors import (
install_req_from_editable,
install_req_from_line,
)
from pip._internal.req.req_file import COMMENT_RE
from pip._internal.utils.direct_url_helpers import direct_url_as_pep440_direct_reference
logger = logging.getLogger(__name__)
class _EditableInfo(NamedTuple):
requirement: str
comments: List[str]
def freeze(
requirement: Optional[List[str]] = None,
local_only: bool = False,
user_only: bool = False,
paths: Optional[List[str]] = None,
isolated: bool = False,
exclude_editable: bool = False,
skip: Container[str] = (),
) -> Generator[str, None, None]:
installations: Dict[str, FrozenRequirement] = {}
dists = get_environment(paths).iter_installed_distributions(
local_only=local_only,
skip=(),
user_only=user_only,
)
for dist in dists:
req = FrozenRequirement.from_dist(dist)
if exclude_editable and req.editable:
continue
installations[req.canonical_name] = req
if requirement:
# the options that don't get turned into an InstallRequirement
# should only be emitted once, even if the same option is in multiple
# requirements files, so we need to keep track of what has been emitted
# so that we don't emit it again if it's seen again
emitted_options: Set[str] = set()
# keep track of which files a requirement is in so that we can
# give an accurate warning if a requirement appears multiple times.
req_files: Dict[str, List[str]] = collections.defaultdict(list)
for req_file_path in requirement:
with open(req_file_path) as req_file:
for line in req_file:
if (
not line.strip()
or line.strip().startswith("#")
or line.startswith(
(
"-r",
"--requirement",
"-f",
"--find-links",
"-i",
"--index-url",
"--pre",
"--trusted-host",
"--process-dependency-links",
"--extra-index-url",
"--use-feature",
)
)
):
line = line.rstrip()
if line not in emitted_options:
emitted_options.add(line)
yield line
continue
if line.startswith("-e") or line.startswith("--editable"):
if line.startswith("-e"):
line = line[2:].strip()
else:
line = line[len("--editable") :].strip().lstrip("=")
line_req = install_req_from_editable(
line,
isolated=isolated,
)
else:
line_req = install_req_from_line(
COMMENT_RE.sub("", line).strip(),
isolated=isolated,
)
if not line_req.name:
logger.info(
"Skipping line in requirement file [%s] because "
"it's not clear what it would install: %s",
req_file_path,
line.strip(),
)
logger.info(
" (add #egg=PackageName to the URL to avoid"
" this warning)"
)
else:
line_req_canonical_name = canonicalize_name(line_req.name)
if line_req_canonical_name not in installations:
# either it's not installed, or it is installed
# but has been processed already
if not req_files[line_req.name]:
logger.warning(
"Requirement file [%s] contains %s, but "
"package %r is not installed",
req_file_path,
COMMENT_RE.sub("", line).strip(),
line_req.name,
)
else:
req_files[line_req.name].append(req_file_path)
else:
yield str(installations[line_req_canonical_name]).rstrip()
del installations[line_req_canonical_name]
req_files[line_req.name].append(req_file_path)
# Warn about requirements that were included multiple times (in a
# single requirements file or in different requirements files).
for name, files in req_files.items():
if len(files) > 1:
logger.warning(
"Requirement %s included multiple times [%s]",
name,
", ".join(sorted(set(files))),
)
yield ("## The following requirements were added by pip freeze:")
for installation in sorted(installations.values(), key=lambda x: x.name.lower()):
if installation.canonical_name not in skip:
yield str(installation).rstrip()
def _format_as_name_version(dist: BaseDistribution) -> str:
dist_version = dist.version
if isinstance(dist_version, Version):
return f"{dist.raw_name}=={dist_version}"
return f"{dist.raw_name}==={dist_version}"
def _get_editable_info(dist: BaseDistribution) -> _EditableInfo:
"""
Compute and return values (req, comments) for use in
FrozenRequirement.from_dist().
"""
editable_project_location = dist.editable_project_location
assert editable_project_location
location = os.path.normcase(os.path.abspath(editable_project_location))
from pip._internal.vcs import RemoteNotFoundError, RemoteNotValidError, vcs
vcs_backend = vcs.get_backend_for_dir(location)
if vcs_backend is None:
display = _format_as_name_version(dist)
logger.debug(
'No VCS found for editable requirement "%s" in: %r',
display,
location,
)
return _EditableInfo(
requirement=location,
comments=[f"# Editable install with no version control ({display})"],
)
vcs_name = type(vcs_backend).__name__
try:
req = vcs_backend.get_src_requirement(location, dist.raw_name)
except RemoteNotFoundError:
display = _format_as_name_version(dist)
return _EditableInfo(
requirement=location,
comments=[f"# Editable {vcs_name} install with no remote ({display})"],
)
except RemoteNotValidError as ex:
display = _format_as_name_version(dist)
return _EditableInfo(
requirement=location,
comments=[
f"# Editable {vcs_name} install ({display}) with either a deleted "
f"local remote or invalid URI:",
f"# '{ex.url}'",
],
)
except BadCommand:
logger.warning(
"cannot determine version of editable source in %s "
"(%s command not found in path)",
location,
vcs_backend.name,
)
return _EditableInfo(requirement=location, comments=[])
except InstallationError as exc:
logger.warning("Error when trying to get requirement for VCS system %s", exc)
else:
return _EditableInfo(requirement=req, comments=[])
logger.warning("Could not determine repository location of %s", location)
return _EditableInfo(
requirement=location,
comments=["## !! Could not determine repository location"],
)
class FrozenRequirement:
def __init__(
self,
name: str,
req: str,
editable: bool,
comments: Iterable[str] = (),
) -> None:
self.name = name
self.canonical_name = canonicalize_name(name)
self.req = req
self.editable = editable
self.comments = comments
@classmethod
def from_dist(cls, dist: BaseDistribution) -> "FrozenRequirement":
editable = dist.editable
if editable:
req, comments = _get_editable_info(dist)
else:
comments = []
direct_url = dist.direct_url
if direct_url:
# if PEP 610 metadata is present, use it
req = direct_url_as_pep440_direct_reference(direct_url, dist.raw_name)
else:
# name==version requirement
req = _format_as_name_version(dist)
return cls(dist.raw_name, req, editable, comments=comments)
def __str__(self) -> str:
req = self.req
if self.editable:
req = f"-e {req}"
return "\n".join(list(self.comments) + [str(req)]) + "\n"


@@ -0,0 +1,2 @@
"""For modules related to installing packages.
"""


@@ -0,0 +1,46 @@
"""Legacy editable installation process, i.e. `setup.py develop`.
"""
import logging
from typing import Optional, Sequence
from pip._internal.build_env import BuildEnvironment
from pip._internal.utils.logging import indent_log
from pip._internal.utils.setuptools_build import make_setuptools_develop_args
from pip._internal.utils.subprocess import call_subprocess
logger = logging.getLogger(__name__)
def install_editable(
*,
global_options: Sequence[str],
prefix: Optional[str],
home: Optional[str],
use_user_site: bool,
name: str,
setup_py_path: str,
isolated: bool,
build_env: BuildEnvironment,
unpacked_source_directory: str,
) -> None:
"""Install a package in editable mode. Most arguments are pass-through
to setuptools.
"""
logger.info("Running setup.py develop for %s", name)
args = make_setuptools_develop_args(
setup_py_path,
global_options=global_options,
no_user_config=isolated,
prefix=prefix,
home=home,
use_user_site=use_user_site,
)
with indent_log():
with build_env:
call_subprocess(
args,
command_desc="python setup.py develop",
cwd=unpacked_source_directory,
)
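
Stripped of pip's build environment and log indentation, the legacy editable install is essentially "setup.py develop" run in the project directory (the path below is hypothetical; current pip prefers PEP 660 editable wheels when the backend supports them):

import subprocess
import sys

source_dir = "/path/to/project"  # hypothetical directory containing setup.py
subprocess.run([sys.executable, "setup.py", "develop"], cwd=source_dir, check=True)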

File diff suppressed because it is too large.

File diff suppressed because it is too large.