first commit
@@ -0,0 +1,25 @@
"""distutils.command

Package containing implementation of all the standard Distutils
commands."""

__all__ = [  # noqa: F822
    'build',
    'build_py',
    'build_ext',
    'build_clib',
    'build_scripts',
    'clean',
    'install',
    'install_lib',
    'install_headers',
    'install_scripts',
    'install_data',
    'sdist',
    'register',
    'bdist',
    'bdist_dumb',
    'bdist_rpm',
    'check',
    'upload',
]
23 binary files not shown.
@@ -0,0 +1,55 @@
"""
Backward compatibility for homebrew builds on macOS.
"""


import sys
import os
import functools
import subprocess
import sysconfig


@functools.lru_cache()
def enabled():
    """
    Only enabled for Python 3.9 framework homebrew builds
    except ensurepip and venv.
    """
    PY39 = (3, 9) < sys.version_info < (3, 10)
    framework = sys.platform == 'darwin' and sys._framework
    homebrew = "Cellar" in sysconfig.get_config_var('projectbase')
    venv = sys.prefix != sys.base_prefix
    ensurepip = os.environ.get("ENSUREPIP_OPTIONS")
    return PY39 and framework and homebrew and not venv and not ensurepip


schemes = dict(
    osx_framework_library=dict(
        stdlib='{installed_base}/{platlibdir}/python{py_version_short}',
        platstdlib='{platbase}/{platlibdir}/python{py_version_short}',
        purelib='{homebrew_prefix}/lib/python{py_version_short}/site-packages',
        platlib='{homebrew_prefix}/{platlibdir}/python{py_version_short}/site-packages',
        include='{installed_base}/include/python{py_version_short}{abiflags}',
        platinclude='{installed_platbase}/include/python{py_version_short}{abiflags}',
        scripts='{homebrew_prefix}/bin',
        data='{homebrew_prefix}',
    )
)


@functools.lru_cache()
def vars():
    if not enabled():
        return {}
    homebrew_prefix = subprocess.check_output(['brew', '--prefix'], text=True).strip()
    return locals()


def scheme(name):
    """
    Override the selected scheme for posix_prefix.
    """
    if not enabled() or not name.endswith('_prefix'):
        return name
    return 'osx_framework_library'
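A minimal usage sketch (not part of the committed file, and assuming enabled() returns True on the target Homebrew framework build of Python 3.9): the helpers above combine so that the posix_prefix scheme is redirected to osx_framework_library and its path templates can be expanded with the Homebrew prefix.

# Usage sketch; assumes a machine where enabled() is True.
if enabled():
    name = scheme('posix_prefix')                # -> 'osx_framework_library'
    scripts_template = schemes[name]['scripts']  # -> '{homebrew_prefix}/bin'
    print(scripts_template.format(**vars()))     # e.g. /opt/homebrew/bin (illustrative)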
157  venv/Lib/site-packages/setuptools/_distutils/command/bdist.py  Normal file
@@ -0,0 +1,157 @@
"""distutils.command.bdist

Implements the Distutils 'bdist' command (create a built [binary]
distribution)."""

import os
import warnings

from distutils.core import Command
from distutils.errors import DistutilsPlatformError, DistutilsOptionError
from distutils.util import get_platform


def show_formats():
    """Print list of available formats (arguments to "--format" option)."""
    from distutils.fancy_getopt import FancyGetopt

    formats = []
    for format in bdist.format_commands:
        formats.append(("formats=" + format, None, bdist.format_commands[format][1]))
    pretty_printer = FancyGetopt(formats)
    pretty_printer.print_help("List of available distribution formats:")


class ListCompat(dict):
    # adapter to allow for Setuptools compatibility in format_commands
    def append(self, item):
        warnings.warn(
            """format_commands is now a dict. append is deprecated.""",
            DeprecationWarning,
            stacklevel=2,
        )


class bdist(Command):

    description = "create a built (binary) distribution"

    user_options = [
        ('bdist-base=', 'b', "temporary directory for creating built distributions"),
        (
            'plat-name=',
            'p',
            "platform name to embed in generated filenames "
            "(default: %s)" % get_platform(),
        ),
        ('formats=', None, "formats for distribution (comma-separated list)"),
        (
            'dist-dir=',
            'd',
            "directory to put final built distributions in [default: dist]",
        ),
        ('skip-build', None, "skip rebuilding everything (for testing/debugging)"),
        (
            'owner=',
            'u',
            "Owner name used when creating a tar file [default: current user]",
        ),
        (
            'group=',
            'g',
            "Group name used when creating a tar file [default: current group]",
        ),
    ]

    boolean_options = ['skip-build']

    help_options = [
        ('help-formats', None, "lists available distribution formats", show_formats),
    ]

    # The following commands do not take a format option from bdist
    no_format_option = ('bdist_rpm',)

    # This won't do in reality: will need to distinguish RPM-ish Linux,
    # Debian-ish Linux, Solaris, FreeBSD, ..., Windows, Mac OS.
    default_format = {'posix': 'gztar', 'nt': 'zip'}

    # Define commands in preferred order for the --help-formats option
    format_commands = ListCompat(
        {
            'rpm': ('bdist_rpm', "RPM distribution"),
            'gztar': ('bdist_dumb', "gzip'ed tar file"),
            'bztar': ('bdist_dumb', "bzip2'ed tar file"),
            'xztar': ('bdist_dumb', "xz'ed tar file"),
            'ztar': ('bdist_dumb', "compressed tar file"),
            'tar': ('bdist_dumb', "tar file"),
            'zip': ('bdist_dumb', "ZIP file"),
        }
    )

    # for compatibility until consumers only reference format_commands
    format_command = format_commands

    def initialize_options(self):
        self.bdist_base = None
        self.plat_name = None
        self.formats = None
        self.dist_dir = None
        self.skip_build = 0
        self.group = None
        self.owner = None

    def finalize_options(self):
        # have to finalize 'plat_name' before 'bdist_base'
        if self.plat_name is None:
            if self.skip_build:
                self.plat_name = get_platform()
            else:
                self.plat_name = self.get_finalized_command('build').plat_name

        # 'bdist_base' -- parent of per-built-distribution-format
        # temporary directories (eg. we'll probably have
        # "build/bdist.<plat>/dumb", "build/bdist.<plat>/rpm", etc.)
        if self.bdist_base is None:
            build_base = self.get_finalized_command('build').build_base
            self.bdist_base = os.path.join(build_base, 'bdist.' + self.plat_name)

        self.ensure_string_list('formats')
        if self.formats is None:
            try:
                self.formats = [self.default_format[os.name]]
            except KeyError:
                raise DistutilsPlatformError(
                    "don't know how to create built distributions "
                    "on platform %s" % os.name
                )

        if self.dist_dir is None:
            self.dist_dir = "dist"

    def run(self):
        # Figure out which sub-commands we need to run.
        commands = []
        for format in self.formats:
            try:
                commands.append(self.format_commands[format][0])
            except KeyError:
                raise DistutilsOptionError("invalid format '%s'" % format)

        # Reinitialize and run each command.
        for i in range(len(self.formats)):
            cmd_name = commands[i]
            sub_cmd = self.reinitialize_command(cmd_name)
            if cmd_name not in self.no_format_option:
                sub_cmd.format = self.formats[i]

            # passing the owner and group names for tar archiving
            if cmd_name == 'bdist_dumb':
                sub_cmd.owner = self.owner
                sub_cmd.group = self.group

            # If we're going to need to run this command again, tell it to
            # keep its temporary files around so subsequent runs go faster.
            if cmd_name in commands[i + 1 :]:
                sub_cmd.keep_temp = 1
            self.run_command(cmd_name)
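For orientation, a small sketch (not part of the diff) of how the format table above is consulted: each requested format is looked up in format_commands and dispatched to the named sub-command, so a hypothetical run such as "python setup.py bdist --formats=gztar,zip" ends up invoking bdist_dumb twice, once per format.

# Sketch: resolving --formats the same way bdist.run() does.
for fmt in ['gztar', 'zip']:                      # hypothetical user-supplied formats
    cmd_name, description = bdist.format_commands[fmt]
    print("%s -> %s (%s)" % (fmt, cmd_name, description))   # both map to 'bdist_dumb'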
@@ -0,0 +1,144 @@
"""distutils.command.bdist_dumb

Implements the Distutils 'bdist_dumb' command (create a "dumb" built
distribution -- i.e., just an archive to be unpacked under $prefix or
$exec_prefix)."""

import os
from distutils.core import Command
from distutils.util import get_platform
from distutils.dir_util import remove_tree, ensure_relative
from distutils.errors import DistutilsPlatformError
from distutils.sysconfig import get_python_version
from distutils import log


class bdist_dumb(Command):

    description = "create a \"dumb\" built distribution"

    user_options = [
        ('bdist-dir=', 'd', "temporary directory for creating the distribution"),
        (
            'plat-name=',
            'p',
            "platform name to embed in generated filenames "
            "(default: %s)" % get_platform(),
        ),
        (
            'format=',
            'f',
            "archive format to create (tar, gztar, bztar, xztar, ztar, zip)",
        ),
        (
            'keep-temp',
            'k',
            "keep the pseudo-installation tree around after "
            + "creating the distribution archive",
        ),
        ('dist-dir=', 'd', "directory to put final built distributions in"),
        ('skip-build', None, "skip rebuilding everything (for testing/debugging)"),
        (
            'relative',
            None,
            "build the archive using relative paths (default: false)",
        ),
        (
            'owner=',
            'u',
            "Owner name used when creating a tar file [default: current user]",
        ),
        (
            'group=',
            'g',
            "Group name used when creating a tar file [default: current group]",
        ),
    ]

    boolean_options = ['keep-temp', 'skip-build', 'relative']

    default_format = {'posix': 'gztar', 'nt': 'zip'}

    def initialize_options(self):
        self.bdist_dir = None
        self.plat_name = None
        self.format = None
        self.keep_temp = 0
        self.dist_dir = None
        self.skip_build = None
        self.relative = 0
        self.owner = None
        self.group = None

    def finalize_options(self):
        if self.bdist_dir is None:
            bdist_base = self.get_finalized_command('bdist').bdist_base
            self.bdist_dir = os.path.join(bdist_base, 'dumb')

        if self.format is None:
            try:
                self.format = self.default_format[os.name]
            except KeyError:
                raise DistutilsPlatformError(
                    "don't know how to create dumb built distributions "
                    "on platform %s" % os.name
                )

        self.set_undefined_options(
            'bdist',
            ('dist_dir', 'dist_dir'),
            ('plat_name', 'plat_name'),
            ('skip_build', 'skip_build'),
        )

    def run(self):
        if not self.skip_build:
            self.run_command('build')

        install = self.reinitialize_command('install', reinit_subcommands=1)
        install.root = self.bdist_dir
        install.skip_build = self.skip_build
        install.warn_dir = 0

        log.info("installing to %s", self.bdist_dir)
        self.run_command('install')

        # And make an archive relative to the root of the
        # pseudo-installation tree.
        archive_basename = "{}.{}".format(
            self.distribution.get_fullname(), self.plat_name
        )

        pseudoinstall_root = os.path.join(self.dist_dir, archive_basename)
        if not self.relative:
            archive_root = self.bdist_dir
        else:
            if self.distribution.has_ext_modules() and (
                install.install_base != install.install_platbase
            ):
                raise DistutilsPlatformError(
                    "can't make a dumb built distribution where "
                    "base and platbase are different (%s, %s)"
                    % (repr(install.install_base), repr(install.install_platbase))
                )
            else:
                archive_root = os.path.join(
                    self.bdist_dir, ensure_relative(install.install_base)
                )

        # Make the archive
        filename = self.make_archive(
            pseudoinstall_root,
            self.format,
            root_dir=archive_root,
            owner=self.owner,
            group=self.group,
        )
        if self.distribution.has_ext_modules():
            pyversion = get_python_version()
        else:
            pyversion = 'any'
        self.distribution.dist_files.append(('bdist_dumb', pyversion, filename))

        if not self.keep_temp:
            remove_tree(self.bdist_dir, dry_run=self.dry_run)
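A brief illustration (values are hypothetical, not taken from the commit) of the archive naming logic above: the archive base name combines the distribution's full name with the platform tag and is placed under dist_dir.

# Sketch: archive base name = "<fullname>.<plat_name>", written below dist_dir.
fullname, plat_name = 'mypkg-1.0', 'linux-x86_64'          # illustrative values
archive_basename = "{}.{}".format(fullname, plat_name)
print(archive_basename)   # mypkg-1.0.linux-x86_64, e.g. dist/mypkg-1.0.linux-x86_64.tar.gz for 'gztar'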
File diff suppressed because it is too large
153  venv/Lib/site-packages/setuptools/_distutils/command/build.py  Normal file
@@ -0,0 +1,153 @@
"""distutils.command.build

Implements the Distutils 'build' command."""

import sys
import os
from distutils.core import Command
from distutils.errors import DistutilsOptionError
from distutils.util import get_platform


def show_compilers():
    from distutils.ccompiler import show_compilers

    show_compilers()


class build(Command):

    description = "build everything needed to install"

    user_options = [
        ('build-base=', 'b', "base directory for build library"),
        ('build-purelib=', None, "build directory for platform-neutral distributions"),
        ('build-platlib=', None, "build directory for platform-specific distributions"),
        (
            'build-lib=',
            None,
            "build directory for all distribution (defaults to either "
            + "build-purelib or build-platlib",
        ),
        ('build-scripts=', None, "build directory for scripts"),
        ('build-temp=', 't', "temporary build directory"),
        (
            'plat-name=',
            'p',
            "platform name to build for, if supported "
            "(default: %s)" % get_platform(),
        ),
        ('compiler=', 'c', "specify the compiler type"),
        ('parallel=', 'j', "number of parallel build jobs"),
        ('debug', 'g', "compile extensions and libraries with debugging information"),
        ('force', 'f', "forcibly build everything (ignore file timestamps)"),
        ('executable=', 'e', "specify final destination interpreter path (build.py)"),
    ]

    boolean_options = ['debug', 'force']

    help_options = [
        ('help-compiler', None, "list available compilers", show_compilers),
    ]

    def initialize_options(self):
        self.build_base = 'build'
        # these are decided only after 'build_base' has its final value
        # (unless overridden by the user or client)
        self.build_purelib = None
        self.build_platlib = None
        self.build_lib = None
        self.build_temp = None
        self.build_scripts = None
        self.compiler = None
        self.plat_name = None
        self.debug = None
        self.force = 0
        self.executable = None
        self.parallel = None

    def finalize_options(self):  # noqa: C901
        if self.plat_name is None:
            self.plat_name = get_platform()
        else:
            # plat-name only supported for windows (other platforms are
            # supported via ./configure flags, if at all).  Avoid misleading
            # other platforms.
            if os.name != 'nt':
                raise DistutilsOptionError(
                    "--plat-name only supported on Windows (try "
                    "using './configure --help' on your platform)"
                )

        plat_specifier = ".{}-{}".format(self.plat_name, sys.implementation.cache_tag)

        # Make it so Python 2.x and Python 2.x with --with-pydebug don't
        # share the same build directories. Doing so confuses the build
        # process for C modules
        if hasattr(sys, 'gettotalrefcount'):
            plat_specifier += '-pydebug'

        # 'build_purelib' and 'build_platlib' just default to 'lib' and
        # 'lib.<plat>' under the base build directory.  We only use one of
        # them for a given distribution, though --
        if self.build_purelib is None:
            self.build_purelib = os.path.join(self.build_base, 'lib')
        if self.build_platlib is None:
            self.build_platlib = os.path.join(self.build_base, 'lib' + plat_specifier)

        # 'build_lib' is the actual directory that we will use for this
        # particular module distribution -- if user didn't supply it, pick
        # one of 'build_purelib' or 'build_platlib'.
        if self.build_lib is None:
            if self.distribution.has_ext_modules():
                self.build_lib = self.build_platlib
            else:
                self.build_lib = self.build_purelib

        # 'build_temp' -- temporary directory for compiler turds,
        # "build/temp.<plat>"
        if self.build_temp is None:
            self.build_temp = os.path.join(self.build_base, 'temp' + plat_specifier)
        if self.build_scripts is None:
            self.build_scripts = os.path.join(
                self.build_base, 'scripts-%d.%d' % sys.version_info[:2]
            )

        if self.executable is None and sys.executable:
            self.executable = os.path.normpath(sys.executable)

        if isinstance(self.parallel, str):
            try:
                self.parallel = int(self.parallel)
            except ValueError:
                raise DistutilsOptionError("parallel should be an integer")

    def run(self):
        # Run all relevant sub-commands.  This will be some subset of:
        #  - build_py      - pure Python modules
        #  - build_clib    - standalone C libraries
        #  - build_ext     - Python extensions
        #  - build_scripts - (Python) scripts
        for cmd_name in self.get_sub_commands():
            self.run_command(cmd_name)

    # -- Predicates for the sub-command list ---------------------------

    def has_pure_modules(self):
        return self.distribution.has_pure_modules()

    def has_c_libraries(self):
        return self.distribution.has_c_libraries()

    def has_ext_modules(self):
        return self.distribution.has_ext_modules()

    def has_scripts(self):
        return self.distribution.has_scripts()

    sub_commands = [
        ('build_py', has_pure_modules),
        ('build_clib', has_c_libraries),
        ('build_ext', has_ext_modules),
        ('build_scripts', has_scripts),
    ]
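A short sketch (illustrative, not part of the diff) of the directory names finalize_options() derives from the platform and the interpreter's cache tag:

# Sketch: reproducing the build-directory naming used above.
import os
import sys
from distutils.util import get_platform

plat_specifier = ".{}-{}".format(get_platform(), sys.implementation.cache_tag)
print(os.path.join('build', 'lib' + plat_specifier))    # e.g. build/lib.linux-x86_64-cpython-310
print(os.path.join('build', 'temp' + plat_specifier))   # e.g. build/temp.linux-x86_64-cpython-310
print(os.path.join('build', 'scripts-%d.%d' % sys.version_info[:2]))  # e.g. build/scripts-3.10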
@@ -0,0 +1,208 @@
"""distutils.command.build_clib

Implements the Distutils 'build_clib' command, to build a C/C++ library
that is included in the module distribution and needed by an extension
module."""


# XXX this module has *lots* of code ripped-off quite transparently from
# build_ext.py -- not surprisingly really, as the work required to build
# a static library from a collection of C source files is not really all
# that different from what's required to build a shared object file from
# a collection of C source files.  Nevertheless, I haven't done the
# necessary refactoring to account for the overlap in code between the
# two modules, mainly because a number of subtle details changed in the
# cut 'n paste.  Sigh.

import os
from distutils.core import Command
from distutils.errors import DistutilsSetupError
from distutils.sysconfig import customize_compiler
from distutils import log


def show_compilers():
    from distutils.ccompiler import show_compilers

    show_compilers()


class build_clib(Command):

    description = "build C/C++ libraries used by Python extensions"

    user_options = [
        ('build-clib=', 'b', "directory to build C/C++ libraries to"),
        ('build-temp=', 't', "directory to put temporary build by-products"),
        ('debug', 'g', "compile with debugging information"),
        ('force', 'f', "forcibly build everything (ignore file timestamps)"),
        ('compiler=', 'c', "specify the compiler type"),
    ]

    boolean_options = ['debug', 'force']

    help_options = [
        ('help-compiler', None, "list available compilers", show_compilers),
    ]

    def initialize_options(self):
        self.build_clib = None
        self.build_temp = None

        # List of libraries to build
        self.libraries = None

        # Compilation options for all libraries
        self.include_dirs = None
        self.define = None
        self.undef = None
        self.debug = None
        self.force = 0
        self.compiler = None

    def finalize_options(self):
        # This might be confusing: both build-clib and build-temp default
        # to build-temp as defined by the "build" command.  This is because
        # I think that C libraries are really just temporary build
        # by-products, at least from the point of view of building Python
        # extensions -- but I want to keep my options open.
        self.set_undefined_options(
            'build',
            ('build_temp', 'build_clib'),
            ('build_temp', 'build_temp'),
            ('compiler', 'compiler'),
            ('debug', 'debug'),
            ('force', 'force'),
        )

        self.libraries = self.distribution.libraries
        if self.libraries:
            self.check_library_list(self.libraries)

        if self.include_dirs is None:
            self.include_dirs = self.distribution.include_dirs or []
        if isinstance(self.include_dirs, str):
            self.include_dirs = self.include_dirs.split(os.pathsep)

        # XXX same as for build_ext -- what about 'self.define' and
        # 'self.undef' ?

    def run(self):
        if not self.libraries:
            return

        # Yech -- this is cut 'n pasted from build_ext.py!
        from distutils.ccompiler import new_compiler

        self.compiler = new_compiler(
            compiler=self.compiler, dry_run=self.dry_run, force=self.force
        )
        customize_compiler(self.compiler)

        if self.include_dirs is not None:
            self.compiler.set_include_dirs(self.include_dirs)
        if self.define is not None:
            # 'define' option is a list of (name,value) tuples
            for (name, value) in self.define:
                self.compiler.define_macro(name, value)
        if self.undef is not None:
            for macro in self.undef:
                self.compiler.undefine_macro(macro)

        self.build_libraries(self.libraries)

    def check_library_list(self, libraries):
        """Ensure that the list of libraries is valid.

        `library` is presumably provided as a command option 'libraries'.
        This method checks that it is a list of 2-tuples, where the tuples
        are (library_name, build_info_dict).

        Raise DistutilsSetupError if the structure is invalid anywhere;
        just returns otherwise.
        """
        if not isinstance(libraries, list):
            raise DistutilsSetupError("'libraries' option must be a list of tuples")

        for lib in libraries:
            if not isinstance(lib, tuple) and len(lib) != 2:
                raise DistutilsSetupError("each element of 'libraries' must a 2-tuple")

            name, build_info = lib

            if not isinstance(name, str):
                raise DistutilsSetupError(
                    "first element of each tuple in 'libraries' "
                    "must be a string (the library name)"
                )

            if '/' in name or (os.sep != '/' and os.sep in name):
                raise DistutilsSetupError(
                    "bad library name '%s': "
                    "may not contain directory separators" % lib[0]
                )

            if not isinstance(build_info, dict):
                raise DistutilsSetupError(
                    "second element of each tuple in 'libraries' "
                    "must be a dictionary (build info)"
                )

    def get_library_names(self):
        # Assume the library list is valid -- 'check_library_list()' is
        # called from 'finalize_options()', so it should be!
        if not self.libraries:
            return None

        lib_names = []
        for (lib_name, build_info) in self.libraries:
            lib_names.append(lib_name)
        return lib_names

    def get_source_files(self):
        self.check_library_list(self.libraries)
        filenames = []
        for (lib_name, build_info) in self.libraries:
            sources = build_info.get('sources')
            if sources is None or not isinstance(sources, (list, tuple)):
                raise DistutilsSetupError(
                    "in 'libraries' option (library '%s'), "
                    "'sources' must be present and must be "
                    "a list of source filenames" % lib_name
                )

            filenames.extend(sources)
        return filenames

    def build_libraries(self, libraries):
        for (lib_name, build_info) in libraries:
            sources = build_info.get('sources')
            if sources is None or not isinstance(sources, (list, tuple)):
                raise DistutilsSetupError(
                    "in 'libraries' option (library '%s'), "
                    "'sources' must be present and must be "
                    "a list of source filenames" % lib_name
                )
            sources = list(sources)

            log.info("building '%s' library", lib_name)

            # First, compile the source code to object files in the library
            # directory.  (This should probably change to putting object
            # files in a temporary build directory.)
            macros = build_info.get('macros')
            include_dirs = build_info.get('include_dirs')
            objects = self.compiler.compile(
                sources,
                output_dir=self.build_temp,
                macros=macros,
                include_dirs=include_dirs,
                debug=self.debug,
            )

            # Now "link" the object files together into a static library.
            # (On Unix at least, this isn't really linking -- it just
            # builds an archive.  Whatever.)
            self.compiler.create_static_lib(
                objects, lib_name, output_dir=self.build_clib, debug=self.debug
            )
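For reference, a hedged sketch (hypothetical names, not part of the commit) of the 'libraries' structure that check_library_list() accepts, i.e. a list of (name, build_info) 2-tuples:

# Sketch: the shape expected by check_library_list() / build_libraries().
libraries = [
    (
        'sample',                                # library name, no path separators
        {
            'sources': ['libsample/sample.c'],   # required: list of source files
            'macros': [('NDEBUG', '1')],         # optional (name, value) macro tuples
            'include_dirs': ['libsample'],       # optional extra include directories
        },
    ),
]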
File diff suppressed because it is too large
407  venv/Lib/site-packages/setuptools/_distutils/command/build_py.py  Normal file
@@ -0,0 +1,407 @@
"""distutils.command.build_py

Implements the Distutils 'build_py' command."""

import os
import importlib.util
import sys
import glob

from distutils.core import Command
from distutils.errors import DistutilsOptionError, DistutilsFileError
from distutils.util import convert_path
from distutils import log


class build_py(Command):

    description = "\"build\" pure Python modules (copy to build directory)"

    user_options = [
        ('build-lib=', 'd', "directory to \"build\" (copy) to"),
        ('compile', 'c', "compile .py to .pyc"),
        ('no-compile', None, "don't compile .py files [default]"),
        (
            'optimize=',
            'O',
            "also compile with optimization: -O1 for \"python -O\", "
            "-O2 for \"python -OO\", and -O0 to disable [default: -O0]",
        ),
        ('force', 'f', "forcibly build everything (ignore file timestamps)"),
    ]

    boolean_options = ['compile', 'force']
    negative_opt = {'no-compile': 'compile'}

    def initialize_options(self):
        self.build_lib = None
        self.py_modules = None
        self.package = None
        self.package_data = None
        self.package_dir = None
        self.compile = 0
        self.optimize = 0
        self.force = None

    def finalize_options(self):
        self.set_undefined_options(
            'build', ('build_lib', 'build_lib'), ('force', 'force')
        )

        # Get the distribution options that are aliases for build_py
        # options -- list of packages and list of modules.
        self.packages = self.distribution.packages
        self.py_modules = self.distribution.py_modules
        self.package_data = self.distribution.package_data
        self.package_dir = {}
        if self.distribution.package_dir:
            for name, path in self.distribution.package_dir.items():
                self.package_dir[name] = convert_path(path)
        self.data_files = self.get_data_files()

        # Ick, copied straight from install_lib.py (fancy_getopt needs a
        # type system!  Hell, *everything* needs a type system!!!)
        if not isinstance(self.optimize, int):
            try:
                self.optimize = int(self.optimize)
                assert 0 <= self.optimize <= 2
            except (ValueError, AssertionError):
                raise DistutilsOptionError("optimize must be 0, 1, or 2")

    def run(self):
        # XXX copy_file by default preserves atime and mtime.  IMHO this is
        # the right thing to do, but perhaps it should be an option -- in
        # particular, a site administrator might want installed files to
        # reflect the time of installation rather than the last
        # modification time before the installed release.

        # XXX copy_file by default preserves mode, which appears to be the
        # wrong thing to do: if a file is read-only in the working
        # directory, we want it to be installed read/write so that the next
        # installation of the same module distribution can overwrite it
        # without problems.  (This might be a Unix-specific issue.)  Thus
        # we turn off 'preserve_mode' when copying to the build directory,
        # since the build directory is supposed to be exactly what the
        # installation will look like (ie. we preserve mode when
        # installing).

        # Two options control which modules will be installed: 'packages'
        # and 'py_modules'.  The former lets us work with whole packages, not
        # specifying individual modules at all; the latter is for
        # specifying modules one-at-a-time.

        if self.py_modules:
            self.build_modules()
        if self.packages:
            self.build_packages()
            self.build_package_data()

        self.byte_compile(self.get_outputs(include_bytecode=0))

    def get_data_files(self):
        """Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
        data = []
        if not self.packages:
            return data
        for package in self.packages:
            # Locate package source directory
            src_dir = self.get_package_dir(package)

            # Compute package build directory
            build_dir = os.path.join(*([self.build_lib] + package.split('.')))

            # Length of path to strip from found files
            plen = 0
            if src_dir:
                plen = len(src_dir) + 1

            # Strip directory from globbed filenames
            filenames = [file[plen:] for file in self.find_data_files(package, src_dir)]
            data.append((package, src_dir, build_dir, filenames))
        return data

    def find_data_files(self, package, src_dir):
        """Return filenames for package's data files in 'src_dir'"""
        globs = self.package_data.get('', []) + self.package_data.get(package, [])
        files = []
        for pattern in globs:
            # Each pattern has to be converted to a platform-specific path
            filelist = glob.glob(
                os.path.join(glob.escape(src_dir), convert_path(pattern))
            )
            # Files that match more than one pattern are only added once
            files.extend(
                [fn for fn in filelist if fn not in files and os.path.isfile(fn)]
            )
        return files

    def build_package_data(self):
        """Copy data files into build directory"""
        for package, src_dir, build_dir, filenames in self.data_files:
            for filename in filenames:
                target = os.path.join(build_dir, filename)
                self.mkpath(os.path.dirname(target))
                self.copy_file(
                    os.path.join(src_dir, filename), target, preserve_mode=False
                )

    def get_package_dir(self, package):
        """Return the directory, relative to the top of the source
        distribution, where package 'package' should be found
        (at least according to the 'package_dir' option, if any)."""
        path = package.split('.')

        if not self.package_dir:
            if path:
                return os.path.join(*path)
            else:
                return ''
        else:
            tail = []
            while path:
                try:
                    pdir = self.package_dir['.'.join(path)]
                except KeyError:
                    tail.insert(0, path[-1])
                    del path[-1]
                else:
                    tail.insert(0, pdir)
                    return os.path.join(*tail)
            else:
                # Oops, got all the way through 'path' without finding a
                # match in package_dir.  If package_dir defines a directory
                # for the root (nameless) package, then fallback on it;
                # otherwise, we might as well have not consulted
                # package_dir at all, as we just use the directory implied
                # by 'tail' (which should be the same as the original value
                # of 'path' at this point).
                pdir = self.package_dir.get('')
                if pdir is not None:
                    tail.insert(0, pdir)

                if tail:
                    return os.path.join(*tail)
                else:
                    return ''

    def check_package(self, package, package_dir):
        # Empty dir name means current directory, which we can probably
        # assume exists.  Also, os.path.exists and isdir don't know about
        # my "empty string means current dir" convention, so we have to
        # circumvent them.
        if package_dir != "":
            if not os.path.exists(package_dir):
                raise DistutilsFileError(
                    "package directory '%s' does not exist" % package_dir
                )
            if not os.path.isdir(package_dir):
                raise DistutilsFileError(
                    "supposed package directory '%s' exists, "
                    "but is not a directory" % package_dir
                )

        # Directories without __init__.py are namespace packages (PEP 420).
        if package:
            init_py = os.path.join(package_dir, "__init__.py")
            if os.path.isfile(init_py):
                return init_py

        # Either not in a package at all (__init__.py not expected), or
        # __init__.py doesn't exist -- so don't return the filename.
        return None

    def check_module(self, module, module_file):
        if not os.path.isfile(module_file):
            log.warn("file %s (for module %s) not found", module_file, module)
            return False
        else:
            return True

    def find_package_modules(self, package, package_dir):
        self.check_package(package, package_dir)
        module_files = glob.glob(os.path.join(glob.escape(package_dir), "*.py"))
        modules = []
        setup_script = os.path.abspath(self.distribution.script_name)

        for f in module_files:
            abs_f = os.path.abspath(f)
            if abs_f != setup_script:
                module = os.path.splitext(os.path.basename(f))[0]
                modules.append((package, module, f))
            else:
                self.debug_print("excluding %s" % setup_script)
        return modules

    def find_modules(self):
        """Finds individually-specified Python modules, ie. those listed by
        module name in 'self.py_modules'.  Returns a list of tuples (package,
        module_base, filename): 'package' is a tuple of the path through
        package-space to the module; 'module_base' is the bare (no
        packages, no dots) module name, and 'filename' is the path to the
        ".py" file (relative to the distribution root) that implements the
        module.
        """
        # Map package names to tuples of useful info about the package:
        #    (package_dir, checked)
        # package_dir - the directory where we'll find source files for
        #   this package
        # checked - true if we have checked that the package directory
        #   is valid (exists, contains __init__.py, ... ?)
        packages = {}

        # List of (package, module, filename) tuples to return
        modules = []

        # We treat modules-in-packages almost the same as toplevel modules,
        # just the "package" for a toplevel is empty (either an empty
        # string or empty list, depending on context).  Differences:
        #   - don't check for __init__.py in directory for empty package
        for module in self.py_modules:
            path = module.split('.')
            package = '.'.join(path[0:-1])
            module_base = path[-1]

            try:
                (package_dir, checked) = packages[package]
            except KeyError:
                package_dir = self.get_package_dir(package)
                checked = 0

            if not checked:
                init_py = self.check_package(package, package_dir)
                packages[package] = (package_dir, 1)
                if init_py:
                    modules.append((package, "__init__", init_py))

            # XXX perhaps we should also check for just .pyc files
            # (so greedy closed-source bastards can distribute Python
            # modules too)
            module_file = os.path.join(package_dir, module_base + ".py")
            if not self.check_module(module, module_file):
                continue

            modules.append((package, module_base, module_file))

        return modules

    def find_all_modules(self):
        """Compute the list of all modules that will be built, whether
        they are specified one-module-at-a-time ('self.py_modules') or
        by whole packages ('self.packages').  Return a list of tuples
        (package, module, module_file), just like 'find_modules()' and
        'find_package_modules()' do."""
        modules = []
        if self.py_modules:
            modules.extend(self.find_modules())
        if self.packages:
            for package in self.packages:
                package_dir = self.get_package_dir(package)
                m = self.find_package_modules(package, package_dir)
                modules.extend(m)
        return modules

    def get_source_files(self):
        return [module[-1] for module in self.find_all_modules()]

    def get_module_outfile(self, build_dir, package, module):
        outfile_path = [build_dir] + list(package) + [module + ".py"]
        return os.path.join(*outfile_path)

    def get_outputs(self, include_bytecode=1):
        modules = self.find_all_modules()
        outputs = []
        for (package, module, module_file) in modules:
            package = package.split('.')
            filename = self.get_module_outfile(self.build_lib, package, module)
            outputs.append(filename)
            if include_bytecode:
                if self.compile:
                    outputs.append(
                        importlib.util.cache_from_source(filename, optimization='')
                    )
                if self.optimize > 0:
                    outputs.append(
                        importlib.util.cache_from_source(
                            filename, optimization=self.optimize
                        )
                    )

        outputs += [
            os.path.join(build_dir, filename)
            for package, src_dir, build_dir, filenames in self.data_files
            for filename in filenames
        ]

        return outputs

    def build_module(self, module, module_file, package):
        if isinstance(package, str):
            package = package.split('.')
        elif not isinstance(package, (list, tuple)):
            raise TypeError(
                "'package' must be a string (dot-separated), list, or tuple"
            )

        # Now put the module source file into the "build" area -- this is
        # easy, we just copy it somewhere under self.build_lib (the build
        # directory for Python source).
        outfile = self.get_module_outfile(self.build_lib, package, module)
        dir = os.path.dirname(outfile)
        self.mkpath(dir)
        return self.copy_file(module_file, outfile, preserve_mode=0)

    def build_modules(self):
        modules = self.find_modules()
        for (package, module, module_file) in modules:
            # Now "build" the module -- ie. copy the source file to
            # self.build_lib (the build directory for Python source).
            # (Actually, it gets copied to the directory for this package
            # under self.build_lib.)
            self.build_module(module, module_file, package)

    def build_packages(self):
        for package in self.packages:
            # Get list of (package, module, module_file) tuples based on
            # scanning the package directory.  'package' is only included
            # in the tuple so that 'find_modules()' and
            # 'find_package_tuples()' have a consistent interface; it's
            # ignored here (apart from a sanity check).  Also, 'module' is
            # the *unqualified* module name (ie. no dots, no package -- we
            # already know its package!), and 'module_file' is the path to
            # the .py file, relative to the current directory
            # (ie. including 'package_dir').
            package_dir = self.get_package_dir(package)
            modules = self.find_package_modules(package, package_dir)

            # Now loop over the modules we found, "building" each one (just
            # copy it to self.build_lib).
            for (package_, module, module_file) in modules:
                assert package == package_
                self.build_module(module, module_file, package)

    def byte_compile(self, files):
        if sys.dont_write_bytecode:
            self.warn('byte-compiling is disabled, skipping.')
            return

        from distutils.util import byte_compile

        prefix = self.build_lib
        if prefix[-1] != os.sep:
            prefix = prefix + os.sep

        # XXX this code is essentially the same as the 'byte_compile()
        # method of the "install_lib" command, except for the determination
        # of the 'prefix' string.  Hmmm.
        if self.compile:
            byte_compile(
                files, optimize=0, force=self.force, prefix=prefix, dry_run=self.dry_run
            )
        if self.optimize > 0:
            byte_compile(
                files,
                optimize=self.optimize,
                force=self.force,
                prefix=prefix,
                dry_run=self.dry_run,
            )
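As a usage sketch (hypothetical project layout, not part of the diff), these are the distribution options build_py consumes; with this configuration, get_package_dir('mypkg') resolves to src/mypkg and find_data_files() copies the matching templates into the build tree.

# Sketch: options that drive build_py for a src/ layout project.
setup_kwargs = dict(
    packages=['mypkg'],
    package_dir={'': 'src'},                      # root package lives under src/
    package_data={'mypkg': ['templates/*.html']},  # data files copied alongside modules
)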
@@ -0,0 +1,173 @@
"""distutils.command.build_scripts

Implements the Distutils 'build_scripts' command."""

import os
import re
from stat import ST_MODE
from distutils import sysconfig
from distutils.core import Command
from distutils.dep_util import newer
from distutils.util import convert_path
from distutils import log
import tokenize

shebang_pattern = re.compile('^#!.*python[0-9.]*([ \t].*)?$')
"""
Pattern matching a Python interpreter indicated in first line of a script.
"""

# for Setuptools compatibility
first_line_re = shebang_pattern


class build_scripts(Command):

    description = "\"build\" scripts (copy and fixup #! line)"

    user_options = [
        ('build-dir=', 'd', "directory to \"build\" (copy) to"),
        ('force', 'f', "forcibly build everything (ignore file timestamps"),
        ('executable=', 'e', "specify final destination interpreter path"),
    ]

    boolean_options = ['force']

    def initialize_options(self):
        self.build_dir = None
        self.scripts = None
        self.force = None
        self.executable = None

    def finalize_options(self):
        self.set_undefined_options(
            'build',
            ('build_scripts', 'build_dir'),
            ('force', 'force'),
            ('executable', 'executable'),
        )
        self.scripts = self.distribution.scripts

    def get_source_files(self):
        return self.scripts

    def run(self):
        if not self.scripts:
            return
        self.copy_scripts()

    def copy_scripts(self):
        """
        Copy each script listed in ``self.scripts``.

        If a script is marked as a Python script (first line matches
        'shebang_pattern', i.e. starts with ``#!`` and contains
        "python"), then adjust in the copy the first line to refer to
        the current Python interpreter.
        """
        self.mkpath(self.build_dir)
        outfiles = []
        updated_files = []
        for script in self.scripts:
            self._copy_script(script, outfiles, updated_files)

        self._change_modes(outfiles)

        return outfiles, updated_files

    def _copy_script(self, script, outfiles, updated_files):  # noqa: C901
        shebang_match = None
        script = convert_path(script)
        outfile = os.path.join(self.build_dir, os.path.basename(script))
        outfiles.append(outfile)

        if not self.force and not newer(script, outfile):
            log.debug("not copying %s (up-to-date)", script)
            return

        # Always open the file, but ignore failures in dry-run mode
        # in order to attempt to copy directly.
        try:
            f = tokenize.open(script)
        except OSError:
            if not self.dry_run:
                raise
            f = None
        else:
            first_line = f.readline()
            if not first_line:
                self.warn("%s is an empty file (skipping)" % script)
                return

            shebang_match = shebang_pattern.match(first_line)

        updated_files.append(outfile)
        if shebang_match:
            log.info("copying and adjusting %s -> %s", script, self.build_dir)
            if not self.dry_run:
                if not sysconfig.python_build:
                    executable = self.executable
                else:
                    executable = os.path.join(
                        sysconfig.get_config_var("BINDIR"),
                        "python%s%s"
                        % (
                            sysconfig.get_config_var("VERSION"),
                            sysconfig.get_config_var("EXE"),
                        ),
                    )
                post_interp = shebang_match.group(1) or ''
                shebang = "#!" + executable + post_interp + "\n"
                self._validate_shebang(shebang, f.encoding)
                with open(outfile, "w", encoding=f.encoding) as outf:
                    outf.write(shebang)
                    outf.writelines(f.readlines())
            if f:
                f.close()
        else:
            if f:
                f.close()
            self.copy_file(script, outfile)

    def _change_modes(self, outfiles):
        if os.name != 'posix':
            return

        for file in outfiles:
            self._change_mode(file)

    def _change_mode(self, file):
        if self.dry_run:
            log.info("changing mode of %s", file)
            return

        oldmode = os.stat(file)[ST_MODE] & 0o7777
        newmode = (oldmode | 0o555) & 0o7777
        if newmode != oldmode:
            log.info("changing mode of %s from %o to %o", file, oldmode, newmode)
            os.chmod(file, newmode)

    @staticmethod
    def _validate_shebang(shebang, encoding):
        # Python parser starts to read a script using UTF-8 until
        # it gets a #coding:xxx cookie. The shebang has to be the
        # first line of a file, the #coding:xxx cookie cannot be
        # written before. So the shebang has to be encodable to
        # UTF-8.
        try:
            shebang.encode('utf-8')
        except UnicodeEncodeError:
            raise ValueError(
                "The shebang ({!r}) is not encodable to utf-8".format(shebang)
            )

        # If the script is encoded to a custom encoding (use a
        # #coding:xxx cookie), the shebang has to be encodable to
        # the script encoding too.
        try:
            shebang.encode(encoding)
        except UnicodeEncodeError:
            raise ValueError(
                "The shebang ({!r}) is not encodable "
                "to the script encoding ({})".format(shebang, encoding)
            )
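A small sketch (illustrative paths, not part of the commit) of the shebang rewriting performed by _copy_script(): the interpreter part of a matching first line is replaced while any trailing interpreter options are preserved.

# Sketch: how a matching shebang line is adjusted.
first_line = "#!/usr/bin/env python3 -u\n"         # hypothetical script first line
match = shebang_pattern.match(first_line)
post_interp = match.group(1) or ''                  # ' -u'
print("#!" + "/usr/local/bin/python3.10" + post_interp)  # illustrative target interpreter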
151  venv/Lib/site-packages/setuptools/_distutils/command/check.py  Normal file
@@ -0,0 +1,151 @@
"""distutils.command.check

Implements the Distutils 'check' command.
"""
import contextlib

from distutils.core import Command
from distutils.errors import DistutilsSetupError

with contextlib.suppress(ImportError):
    import docutils.utils
    import docutils.parsers.rst
    import docutils.frontend
    import docutils.nodes

    class SilentReporter(docutils.utils.Reporter):
        def __init__(
            self,
            source,
            report_level,
            halt_level,
            stream=None,
            debug=0,
            encoding='ascii',
            error_handler='replace',
        ):
            self.messages = []
            super().__init__(
                source, report_level, halt_level, stream, debug, encoding, error_handler
            )

        def system_message(self, level, message, *children, **kwargs):
            self.messages.append((level, message, children, kwargs))
            return docutils.nodes.system_message(
                message, level=level, type=self.levels[level], *children, **kwargs
            )


class check(Command):
    """This command checks the meta-data of the package."""

    description = "perform some checks on the package"
    user_options = [
        ('metadata', 'm', 'Verify meta-data'),
        (
            'restructuredtext',
            'r',
            (
                'Checks if long string meta-data syntax '
                'are reStructuredText-compliant'
            ),
        ),
        ('strict', 's', 'Will exit with an error if a check fails'),
    ]

    boolean_options = ['metadata', 'restructuredtext', 'strict']

    def initialize_options(self):
        """Sets default values for options."""
        self.restructuredtext = 0
        self.metadata = 1
        self.strict = 0
        self._warnings = 0

    def finalize_options(self):
        pass

    def warn(self, msg):
        """Counts the number of warnings that occurs."""
        self._warnings += 1
        return Command.warn(self, msg)

    def run(self):
        """Runs the command."""
        # perform the various tests
        if self.metadata:
            self.check_metadata()
        if self.restructuredtext:
            if 'docutils' in globals():
                try:
                    self.check_restructuredtext()
                except TypeError as exc:
                    raise DistutilsSetupError(str(exc))
            elif self.strict:
                raise DistutilsSetupError('The docutils package is needed.')

        # let's raise an error in strict mode, if we have at least
        # one warning
        if self.strict and self._warnings > 0:
            raise DistutilsSetupError('Please correct your package.')

    def check_metadata(self):
        """Ensures that all required elements of meta-data are supplied.

        Required fields:
            name, version

        Warns if any are missing.
        """
        metadata = self.distribution.metadata

        missing = []
        for attr in 'name', 'version':
            if not getattr(metadata, attr, None):
                missing.append(attr)

        if missing:
            self.warn("missing required meta-data: %s" % ', '.join(missing))

    def check_restructuredtext(self):
        """Checks if the long string fields are reST-compliant."""
        data = self.distribution.get_long_description()
        for warning in self._check_rst_data(data):
            line = warning[-1].get('line')
            if line is None:
                warning = warning[1]
            else:
                warning = '{} (line {})'.format(warning[1], line)
            self.warn(warning)

    def _check_rst_data(self, data):
        """Returns warnings when the provided data doesn't compile."""
        # the include and csv_table directives need this to be a path
        source_path = self.distribution.script_name or 'setup.py'
        parser = docutils.parsers.rst.Parser()
        settings = docutils.frontend.OptionParser(
            components=(docutils.parsers.rst.Parser,)
        ).get_default_values()
        settings.tab_width = 4
        settings.pep_references = None
        settings.rfc_references = None
        reporter = SilentReporter(
            source_path,
            settings.report_level,
            settings.halt_level,
            stream=settings.warning_stream,
            debug=settings.debug,
            encoding=settings.error_encoding,
            error_handler=settings.error_encoding_error_handler,
        )

        document = docutils.nodes.document(settings, reporter, source=source_path)
        document.note_source(source_path, -1)
        try:
            parser.parse(data, document)
        except AttributeError as e:
            reporter.messages.append(
                (-1, 'Could not finish the parsing: %s.' % e, '', {})
            )

        return reporter.messages
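A usage sketch (requires docutils; the package metadata and reST snippet below are hypothetical) showing how the command reports a malformed long_description and, in strict mode, turns warnings into an error:

# Sketch: running the check command programmatically.
from distutils.dist import Distribution

dist = Distribution({
    'name': 'mypkg',
    'version': '1.0',
    'long_description': 'Title\n=====\n\n``unclosed literal',   # deliberately malformed reST
})
cmd = check(dist)
cmd.restructuredtext = 1
cmd.strict = 1
cmd.run()   # warns about the reST error, then raises DistutilsSetupError in strict mode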
@@ -0,0 +1,76 @@
"""distutils.command.clean

Implements the Distutils 'clean' command."""

# contributed by Bastian Kleineidam <calvin@cs.uni-sb.de>, added 2000-03-18

import os
from distutils.core import Command
from distutils.dir_util import remove_tree
from distutils import log


class clean(Command):

    description = "clean up temporary files from 'build' command"
    user_options = [
        ('build-base=', 'b', "base build directory (default: 'build.build-base')"),
        (
            'build-lib=',
            None,
            "build directory for all modules (default: 'build.build-lib')",
        ),
        ('build-temp=', 't', "temporary build directory (default: 'build.build-temp')"),
        (
            'build-scripts=',
            None,
            "build directory for scripts (default: 'build.build-scripts')",
        ),
        ('bdist-base=', None, "temporary directory for built distributions"),
        ('all', 'a', "remove all build output, not just temporary by-products"),
    ]

    boolean_options = ['all']

    def initialize_options(self):
        self.build_base = None
        self.build_lib = None
        self.build_temp = None
        self.build_scripts = None
        self.bdist_base = None
        self.all = None

    def finalize_options(self):
        self.set_undefined_options(
            'build',
            ('build_base', 'build_base'),
            ('build_lib', 'build_lib'),
            ('build_scripts', 'build_scripts'),
            ('build_temp', 'build_temp'),
        )
        self.set_undefined_options('bdist', ('bdist_base', 'bdist_base'))

    def run(self):
        # remove the build/temp.<plat> directory (unless it's already
        # gone)
        if os.path.exists(self.build_temp):
            remove_tree(self.build_temp, dry_run=self.dry_run)
        else:
            log.debug("'%s' does not exist -- can't clean it", self.build_temp)

        if self.all:
            # remove build directories
            for directory in (self.build_lib, self.bdist_base, self.build_scripts):
                if os.path.exists(directory):
                    remove_tree(directory, dry_run=self.dry_run)
                else:
                    log.warn("'%s' does not exist -- can't clean it", directory)

        # just for the heck of it, try to remove the base build directory:
        # we might have emptied it right now, but if not we don't care
        if not self.dry_run:
            try:
                os.rmdir(self.build_base)
                log.info("removing '%s'", self.build_base)
            except OSError:
                pass
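A brief sketch (hypothetical, safe to run in an empty project directory) of driving the command programmatically; from the command line this is simply "python setup.py clean" or "python setup.py clean --all".

# Sketch: programmatic equivalent of "python setup.py clean --all".
from distutils.dist import Distribution

cmd = clean(Distribution())
cmd.all = 1
cmd.ensure_finalized()
cmd.run()   # removes build/temp.*, build/lib*, the bdist base and build/scripts-* if present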
377  venv/Lib/site-packages/setuptools/_distutils/command/config.py  Normal file
@@ -0,0 +1,377 @@
"""distutils.command.config

Implements the Distutils 'config' command, a (mostly) empty command class
that exists mainly to be sub-classed by specific module distributions and
applications.  The idea is that while every "config" command is different,
at least they're all named the same, and users always see "config" in the
list of standard commands.  Also, this is a good place to put common
configure-like tasks: "try to compile this C code", or "figure out where
this header file lives".
"""

import os
import re

from distutils.core import Command
from distutils.errors import DistutilsExecError
from distutils.sysconfig import customize_compiler
from distutils import log

LANG_EXT = {"c": ".c", "c++": ".cxx"}


class config(Command):

    description = "prepare to build"

    user_options = [
        ('compiler=', None, "specify the compiler type"),
        ('cc=', None, "specify the compiler executable"),
        ('include-dirs=', 'I', "list of directories to search for header files"),
        ('define=', 'D', "C preprocessor macros to define"),
        ('undef=', 'U', "C preprocessor macros to undefine"),
        ('libraries=', 'l', "external C libraries to link with"),
        ('library-dirs=', 'L', "directories to search for external C libraries"),
        ('noisy', None, "show every action (compile, link, run, ...) taken"),
        (
            'dump-source',
            None,
            "dump generated source files before attempting to compile them",
        ),
    ]

    # The three standard command methods: since the "config" command
    # does nothing by default, these are empty.

    def initialize_options(self):
        self.compiler = None
        self.cc = None
        self.include_dirs = None
        self.libraries = None
        self.library_dirs = None

        # maximal output for now
        self.noisy = 1
        self.dump_source = 1

        # list of temporary files generated along-the-way that we have
        # to clean at some point
        self.temp_files = []

    def finalize_options(self):
        if self.include_dirs is None:
            self.include_dirs = self.distribution.include_dirs or []
        elif isinstance(self.include_dirs, str):
            self.include_dirs = self.include_dirs.split(os.pathsep)

        if self.libraries is None:
            self.libraries = []
        elif isinstance(self.libraries, str):
            self.libraries = [self.libraries]

        if self.library_dirs is None:
            self.library_dirs = []
        elif isinstance(self.library_dirs, str):
            self.library_dirs = self.library_dirs.split(os.pathsep)

    def run(self):
        pass

    # Utility methods for actual "config" commands.  The interfaces are
    # loosely based on Autoconf macros of similar names.  Sub-classes
    # may use these freely.

    def _check_compiler(self):
        """Check that 'self.compiler' really is a CCompiler object;
        if not, make it one.
        """
        # We do this late, and only on-demand, because this is an expensive
        # import.
        from distutils.ccompiler import CCompiler, new_compiler

        if not isinstance(self.compiler, CCompiler):
            self.compiler = new_compiler(
                compiler=self.compiler, dry_run=self.dry_run, force=1
            )
            customize_compiler(self.compiler)
            if self.include_dirs:
                self.compiler.set_include_dirs(self.include_dirs)
            if self.libraries:
                self.compiler.set_libraries(self.libraries)
            if self.library_dirs:
                self.compiler.set_library_dirs(self.library_dirs)

    def _gen_temp_sourcefile(self, body, headers, lang):
        filename = "_configtest" + LANG_EXT[lang]
        with open(filename, "w") as file:
            if headers:
                for header in headers:
                    file.write("#include <%s>\n" % header)
                file.write("\n")
            file.write(body)
            if body[-1] != "\n":
                file.write("\n")
        return filename

    def _preprocess(self, body, headers, include_dirs, lang):
        src = self._gen_temp_sourcefile(body, headers, lang)
        out = "_configtest.i"
        self.temp_files.extend([src, out])
        self.compiler.preprocess(src, out, include_dirs=include_dirs)
        return (src, out)

    def _compile(self, body, headers, include_dirs, lang):
        src = self._gen_temp_sourcefile(body, headers, lang)
        if self.dump_source:
            dump_file(src, "compiling '%s':" % src)
        (obj,) = self.compiler.object_filenames([src])
        self.temp_files.extend([src, obj])
        self.compiler.compile([src], include_dirs=include_dirs)
        return (src, obj)

    def _link(self, body, headers, include_dirs, libraries, library_dirs, lang):
        (src, obj) = self._compile(body, headers, include_dirs, lang)
prog = os.path.splitext(os.path.basename(src))[0]
|
||||
self.compiler.link_executable(
|
||||
[obj],
|
||||
prog,
|
||||
libraries=libraries,
|
||||
library_dirs=library_dirs,
|
||||
target_lang=lang,
|
||||
)
|
||||
|
||||
if self.compiler.exe_extension is not None:
|
||||
prog = prog + self.compiler.exe_extension
|
||||
self.temp_files.append(prog)
|
||||
|
||||
return (src, obj, prog)
|
||||
|
||||
def _clean(self, *filenames):
|
||||
if not filenames:
|
||||
filenames = self.temp_files
|
||||
self.temp_files = []
|
||||
log.info("removing: %s", ' '.join(filenames))
|
||||
for filename in filenames:
|
||||
try:
|
||||
os.remove(filename)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
# XXX these ignore the dry-run flag: what to do, what to do? even if
|
||||
# you want a dry-run build, you still need some sort of configuration
|
||||
# info. My inclination is to make it up to the real config command to
|
||||
# consult 'dry_run', and assume a default (minimal) configuration if
|
||||
# true. The problem with trying to do it here is that you'd have to
|
||||
# return either true or false from all the 'try' methods, neither of
|
||||
# which is correct.
|
||||
|
||||
# XXX need access to the header search path and maybe default macros.
|
||||
|
||||
def try_cpp(self, body=None, headers=None, include_dirs=None, lang="c"):
|
||||
"""Construct a source file from 'body' (a string containing lines
|
||||
of C/C++ code) and 'headers' (a list of header files to include)
|
||||
and run it through the preprocessor. Return true if the
|
||||
preprocessor succeeded, false if there were any errors.
|
||||
('body' probably isn't of much use, but what the heck.)
|
||||
"""
|
||||
from distutils.ccompiler import CompileError
|
||||
|
||||
self._check_compiler()
|
||||
ok = True
|
||||
try:
|
||||
self._preprocess(body, headers, include_dirs, lang)
|
||||
except CompileError:
|
||||
ok = False
|
||||
|
||||
self._clean()
|
||||
return ok
|
||||
|
||||
def search_cpp(self, pattern, body=None, headers=None, include_dirs=None, lang="c"):
|
||||
"""Construct a source file (just like 'try_cpp()'), run it through
|
||||
the preprocessor, and return true if any line of the output matches
|
||||
'pattern'. 'pattern' should either be a compiled regex object or a
|
||||
string containing a regex. If both 'body' and 'headers' are None,
|
||||
preprocesses an empty file -- which can be useful to determine the
|
||||
symbols the preprocessor and compiler set by default.
|
||||
"""
|
||||
self._check_compiler()
|
||||
src, out = self._preprocess(body, headers, include_dirs, lang)
|
||||
|
||||
if isinstance(pattern, str):
|
||||
pattern = re.compile(pattern)
|
||||
|
||||
with open(out) as file:
|
||||
match = False
|
||||
while True:
|
||||
line = file.readline()
|
||||
if line == '':
|
||||
break
|
||||
if pattern.search(line):
|
||||
match = True
|
||||
break
|
||||
|
||||
self._clean()
|
||||
return match
|
||||
|
||||
def try_compile(self, body, headers=None, include_dirs=None, lang="c"):
|
||||
"""Try to compile a source file built from 'body' and 'headers'.
|
||||
Return true on success, false otherwise.
|
||||
"""
|
||||
from distutils.ccompiler import CompileError
|
||||
|
||||
self._check_compiler()
|
||||
try:
|
||||
self._compile(body, headers, include_dirs, lang)
|
||||
ok = True
|
||||
except CompileError:
|
||||
ok = False
|
||||
|
||||
log.info(ok and "success!" or "failure.")
|
||||
self._clean()
|
||||
return ok
|
||||
|
||||
def try_link(
|
||||
self,
|
||||
body,
|
||||
headers=None,
|
||||
include_dirs=None,
|
||||
libraries=None,
|
||||
library_dirs=None,
|
||||
lang="c",
|
||||
):
|
||||
"""Try to compile and link a source file, built from 'body' and
|
||||
'headers', to executable form. Return true on success, false
|
||||
otherwise.
|
||||
"""
|
||||
from distutils.ccompiler import CompileError, LinkError
|
||||
|
||||
self._check_compiler()
|
||||
try:
|
||||
self._link(body, headers, include_dirs, libraries, library_dirs, lang)
|
||||
ok = True
|
||||
except (CompileError, LinkError):
|
||||
ok = False
|
||||
|
||||
log.info(ok and "success!" or "failure.")
|
||||
self._clean()
|
||||
return ok
|
||||
|
||||
def try_run(
|
||||
self,
|
||||
body,
|
||||
headers=None,
|
||||
include_dirs=None,
|
||||
libraries=None,
|
||||
library_dirs=None,
|
||||
lang="c",
|
||||
):
|
||||
"""Try to compile, link to an executable, and run a program
|
||||
built from 'body' and 'headers'. Return true on success, false
|
||||
otherwise.
|
||||
"""
|
||||
from distutils.ccompiler import CompileError, LinkError
|
||||
|
||||
self._check_compiler()
|
||||
try:
|
||||
src, obj, exe = self._link(
|
||||
body, headers, include_dirs, libraries, library_dirs, lang
|
||||
)
|
||||
self.spawn([exe])
|
||||
ok = True
|
||||
except (CompileError, LinkError, DistutilsExecError):
|
||||
ok = False
|
||||
|
||||
log.info(ok and "success!" or "failure.")
|
||||
self._clean()
|
||||
return ok
|
||||
|
||||
# -- High-level methods --------------------------------------------
|
||||
# (these are the ones that are actually likely to be useful
|
||||
# when implementing a real-world config command!)
|
||||
|
||||
def check_func(
|
||||
self,
|
||||
func,
|
||||
headers=None,
|
||||
include_dirs=None,
|
||||
libraries=None,
|
||||
library_dirs=None,
|
||||
decl=0,
|
||||
call=0,
|
||||
):
|
||||
"""Determine if function 'func' is available by constructing a
|
||||
source file that refers to 'func', and compiles and links it.
|
||||
If everything succeeds, returns true; otherwise returns false.
|
||||
|
||||
The constructed source file starts out by including the header
|
||||
files listed in 'headers'. If 'decl' is true, it then declares
|
||||
'func' (as "int func()"); you probably shouldn't supply 'headers'
|
||||
and set 'decl' true in the same call, or you might get errors about
|
||||
conflicting declarations for 'func'. Finally, the constructed
|
||||
'main()' function either references 'func' or (if 'call' is true)
|
||||
calls it. 'libraries' and 'library_dirs' are used when
|
||||
linking.
|
||||
"""
|
||||
self._check_compiler()
|
||||
body = []
|
||||
if decl:
|
||||
body.append("int %s ();" % func)
|
||||
body.append("int main () {")
|
||||
if call:
|
||||
body.append(" %s();" % func)
|
||||
else:
|
||||
body.append(" %s;" % func)
|
||||
body.append("}")
|
||||
body = "\n".join(body) + "\n"
|
||||
|
||||
return self.try_link(body, headers, include_dirs, libraries, library_dirs)
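# Editor's sketch (not part of distutils): check_func('pow', decl=1, call=1)
# would generate and try to compile/link roughly this source:
#
#   int pow ();
#   int main () {
#     pow();
#   }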
|
||||
|
||||
def check_lib(
|
||||
self,
|
||||
library,
|
||||
library_dirs=None,
|
||||
headers=None,
|
||||
include_dirs=None,
|
||||
other_libraries=[],
|
||||
):
|
||||
"""Determine if 'library' is available to be linked against,
|
||||
without actually checking that any particular symbols are provided
|
||||
by it. 'headers' will be used in constructing the source file to
|
||||
be compiled, but the only effect of this is to check if all the
|
||||
header files listed are available. Any libraries listed in
|
||||
'other_libraries' will be included in the link, in case 'library'
|
||||
has symbols that depend on other libraries.
|
||||
"""
|
||||
self._check_compiler()
|
||||
return self.try_link(
|
||||
"int main (void) { }",
|
||||
headers,
|
||||
include_dirs,
|
||||
[library] + other_libraries,
|
||||
library_dirs,
|
||||
)
|
||||
|
||||
def check_header(self, header, include_dirs=None, library_dirs=None, lang="c"):
|
||||
"""Determine if the system header file named by 'header_file'
|
||||
exists and can be found by the preprocessor; return true if so,
|
||||
false otherwise.
|
||||
"""
|
||||
return self.try_cpp(
|
||||
body="/* No body */", headers=[header], include_dirs=include_dirs
|
||||
)
|
||||
|
||||
|
||||
def dump_file(filename, head=None):
|
||||
"""Dumps a file content into log.info.
|
||||
|
||||
If head is not None, will be dumped before the file content.
|
||||
"""
|
||||
if head is None:
|
||||
log.info('%s', filename)
|
||||
else:
|
||||
log.info(head)
|
||||
with open(filename) as file:
    log.info(file.read())
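As an illustration only (an editor's sketch, not part of this diff; the command name and the probed library are made up), a setup script could subclass the config command above to probe for zlib before building:

from distutils.command.config import config

class config_zlib(config):
    """Hypothetical config subcommand that probes for zlib."""

    def run(self):
        self._check_compiler()
        if not self.check_header('zlib.h'):
            self.warn("zlib.h not found; compression support would be disabled")
        if not self.check_lib('z'):
            self.warn("could not link against libz")

Registered via setup(cmdclass={'config': config_zlib}, ...), running "python setup.py config" would then exercise the try_cpp/try_link helpers defined above.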
|
venv/Lib/site-packages/setuptools/_distutils/command/install.py
(file diff suppressed because it is too large)
@@ -0,0 +1,84 @@
|
||||
"""distutils.command.install_data
|
||||
|
||||
Implements the Distutils 'install_data' command, for installing
|
||||
platform-independent data files."""
|
||||
|
||||
# contributed by Bastian Kleineidam
|
||||
|
||||
import os
|
||||
from distutils.core import Command
|
||||
from distutils.util import change_root, convert_path
|
||||
|
||||
|
||||
class install_data(Command):
|
||||
|
||||
description = "install data files"
|
||||
|
||||
user_options = [
|
||||
(
|
||||
'install-dir=',
|
||||
'd',
|
||||
"base directory for installing data files "
|
||||
"(default: installation base dir)",
|
||||
),
|
||||
('root=', None, "install everything relative to this alternate root directory"),
|
||||
('force', 'f', "force installation (overwrite existing files)"),
|
||||
]
|
||||
|
||||
boolean_options = ['force']
|
||||
|
||||
def initialize_options(self):
|
||||
self.install_dir = None
|
||||
self.outfiles = []
|
||||
self.root = None
|
||||
self.force = 0
|
||||
self.data_files = self.distribution.data_files
|
||||
self.warn_dir = 1
|
||||
|
||||
def finalize_options(self):
|
||||
self.set_undefined_options(
|
||||
'install',
|
||||
('install_data', 'install_dir'),
|
||||
('root', 'root'),
|
||||
('force', 'force'),
|
||||
)
|
||||
|
||||
def run(self):
|
||||
self.mkpath(self.install_dir)
|
||||
for f in self.data_files:
|
||||
if isinstance(f, str):
|
||||
# it's a simple file, so copy it
|
||||
f = convert_path(f)
|
||||
if self.warn_dir:
|
||||
self.warn(
|
||||
"setup script did not provide a directory for "
|
||||
"'%s' -- installing right in '%s'" % (f, self.install_dir)
|
||||
)
|
||||
(out, _) = self.copy_file(f, self.install_dir)
|
||||
self.outfiles.append(out)
|
||||
else:
|
||||
# it's a tuple with path to install to and a list of files
|
||||
dir = convert_path(f[0])
|
||||
if not os.path.isabs(dir):
|
||||
dir = os.path.join(self.install_dir, dir)
|
||||
elif self.root:
|
||||
dir = change_root(self.root, dir)
|
||||
self.mkpath(dir)
|
||||
|
||||
if f[1] == []:
|
||||
# If there are no files listed, the user must be
|
||||
# trying to create an empty directory, so add the
|
||||
# directory to the list of output files.
|
||||
self.outfiles.append(dir)
|
||||
else:
|
||||
# Copy files, adding them to the list of output files.
|
||||
for data in f[1]:
|
||||
data = convert_path(data)
|
||||
(out, _) = self.copy_file(data, dir)
|
||||
self.outfiles.append(out)
|
||||
|
||||
def get_inputs(self):
|
||||
return self.data_files or []
|
||||
|
||||
def get_outputs(self):
|
||||
return self.outfiles
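For reference, an editor's sketch (not in the diff) of the two data_files shapes this run() method handles; the file names are invented:

from distutils.core import setup

setup(
    # ...
    data_files=[
        'README.txt',                       # bare string: copied into install_dir itself
        ('config', ['cfg/defaults.cfg']),   # tuple: copied into <install_dir>/config
        ('logs', []),                       # empty list: just records/creates the directory
    ],
)

A bare string triggers the warn_dir branch above; an absolute directory in the tuple form is re-rooted under --root when one is given.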
|
@@ -0,0 +1,91 @@
|
||||
"""
|
||||
distutils.command.install_egg_info
|
||||
|
||||
Implements the Distutils 'install_egg_info' command, for installing
|
||||
a package's PKG-INFO metadata.
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import re
|
||||
|
||||
from distutils.cmd import Command
|
||||
from distutils import log, dir_util
|
||||
|
||||
|
||||
class install_egg_info(Command):
|
||||
"""Install an .egg-info file for the package"""
|
||||
|
||||
description = "Install package's PKG-INFO metadata as an .egg-info file"
|
||||
user_options = [
|
||||
('install-dir=', 'd', "directory to install to"),
|
||||
]
|
||||
|
||||
def initialize_options(self):
|
||||
self.install_dir = None
|
||||
|
||||
@property
|
||||
def basename(self):
|
||||
"""
|
||||
Allow basename to be overridden by child class.
|
||||
Ref pypa/distutils#2.
|
||||
"""
|
||||
return "%s-%s-py%d.%d.egg-info" % (
|
||||
to_filename(safe_name(self.distribution.get_name())),
|
||||
to_filename(safe_version(self.distribution.get_version())),
|
||||
*sys.version_info[:2],
|
||||
)
|
||||
|
||||
def finalize_options(self):
|
||||
self.set_undefined_options('install_lib', ('install_dir', 'install_dir'))
|
||||
self.target = os.path.join(self.install_dir, self.basename)
|
||||
self.outputs = [self.target]
|
||||
|
||||
def run(self):
|
||||
target = self.target
|
||||
if os.path.isdir(target) and not os.path.islink(target):
|
||||
dir_util.remove_tree(target, dry_run=self.dry_run)
|
||||
elif os.path.exists(target):
|
||||
self.execute(os.unlink, (self.target,), "Removing " + target)
|
||||
elif not os.path.isdir(self.install_dir):
|
||||
self.execute(
|
||||
os.makedirs, (self.install_dir,), "Creating " + self.install_dir
|
||||
)
|
||||
log.info("Writing %s", target)
|
||||
if not self.dry_run:
|
||||
with open(target, 'w', encoding='UTF-8') as f:
|
||||
self.distribution.metadata.write_pkg_file(f)
|
||||
|
||||
def get_outputs(self):
|
||||
return self.outputs
|
||||
|
||||
|
||||
# The following routines are taken from setuptools' pkg_resources module and
|
||||
# can be replaced by importing them from pkg_resources once it is included
|
||||
# in the stdlib.
|
||||
|
||||
|
||||
def safe_name(name):
|
||||
"""Convert an arbitrary string to a standard distribution name
|
||||
|
||||
Any runs of non-alphanumeric/. characters are replaced with a single '-'.
|
||||
"""
|
||||
return re.sub('[^A-Za-z0-9.]+', '-', name)
|
||||
|
||||
|
||||
def safe_version(version):
|
||||
"""Convert an arbitrary string to a standard version string
|
||||
|
||||
Spaces become dots, and all other non-alphanumeric characters become
|
||||
dashes, with runs of multiple dashes condensed to a single dash.
|
||||
"""
|
||||
version = version.replace(' ', '.')
|
||||
return re.sub('[^A-Za-z0-9.]+', '-', version)
|
||||
|
||||
|
||||
def to_filename(name):
|
||||
"""Convert a project or version name to its filename-escaped form
|
||||
|
||||
Any '-' characters are currently replaced with '_'.
|
||||
"""
|
||||
return name.replace('-', '_')
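A quick illustration of the helpers above (editor's sketch; the project name and version are invented):

>>> to_filename(safe_name("My Project"))
'My_Project'
>>> to_filename(safe_version("1.0 beta 2"))
'1.0.beta.2'

so on CPython 3.9 the basename property would yield 'My_Project-1.0.beta.2-py3.9.egg-info'.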
|
@@ -0,0 +1,45 @@
|
||||
"""distutils.command.install_headers
|
||||
|
||||
Implements the Distutils 'install_headers' command, to install C/C++ header
|
||||
files to the Python include directory."""
|
||||
|
||||
from distutils.core import Command
|
||||
|
||||
|
||||
# XXX force is never used
|
||||
class install_headers(Command):
|
||||
|
||||
description = "install C/C++ header files"
|
||||
|
||||
user_options = [
|
||||
('install-dir=', 'd', "directory to install header files to"),
|
||||
('force', 'f', "force installation (overwrite existing files)"),
|
||||
]
|
||||
|
||||
boolean_options = ['force']
|
||||
|
||||
def initialize_options(self):
|
||||
self.install_dir = None
|
||||
self.force = 0
|
||||
self.outfiles = []
|
||||
|
||||
def finalize_options(self):
|
||||
self.set_undefined_options(
|
||||
'install', ('install_headers', 'install_dir'), ('force', 'force')
|
||||
)
|
||||
|
||||
def run(self):
|
||||
headers = self.distribution.headers
|
||||
if not headers:
|
||||
return
|
||||
|
||||
self.mkpath(self.install_dir)
|
||||
for header in headers:
|
||||
(out, _) = self.copy_file(header, self.install_dir)
|
||||
self.outfiles.append(out)
|
||||
|
||||
def get_inputs(self):
|
||||
return self.distribution.headers or []
|
||||
|
||||
def get_outputs(self):
|
||||
return self.outfiles
|
@@ -0,0 +1,238 @@
|
||||
"""distutils.command.install_lib
|
||||
|
||||
Implements the Distutils 'install_lib' command
|
||||
(install all Python modules)."""
|
||||
|
||||
import os
|
||||
import importlib.util
|
||||
import sys
|
||||
|
||||
from distutils.core import Command
|
||||
from distutils.errors import DistutilsOptionError
|
||||
|
||||
|
||||
# Extension for Python source files.
|
||||
PYTHON_SOURCE_EXTENSION = ".py"
|
||||
|
||||
|
||||
class install_lib(Command):
|
||||
|
||||
description = "install all Python modules (extensions and pure Python)"
|
||||
|
||||
# The byte-compilation options are a tad confusing. Here are the
|
||||
# possible scenarios:
|
||||
# 1) no compilation at all (--no-compile --no-optimize)
|
||||
# 2) compile .pyc only (--compile --no-optimize; default)
|
||||
# 3) compile .pyc and "opt-1" .pyc (--compile --optimize)
|
||||
# 4) compile "opt-1" .pyc only (--no-compile --optimize)
|
||||
# 5) compile .pyc and "opt-2" .pyc (--compile --optimize-more)
|
||||
# 6) compile "opt-2" .pyc only (--no-compile --optimize-more)
|
||||
#
|
||||
# The UI for this is two options, 'compile' and 'optimize'.
|
||||
# 'compile' is strictly boolean, and only decides whether to
|
||||
# generate .pyc files. 'optimize' is three-way (0, 1, or 2), and
|
||||
# decides both whether to generate .pyc files and what level of
|
||||
# optimization to use.
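#
# For instance (an editor's illustration, not exhaustive):
#   python setup.py install --optimize=1
# byte-compiles each installed foo.py twice, producing both
# __pycache__/foo.cpython-39.pyc and __pycache__/foo.cpython-39.opt-1.pyc
# (the version tag depends on the interpreter actually running the install).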
|
||||
|
||||
user_options = [
|
||||
('install-dir=', 'd', "directory to install to"),
|
||||
('build-dir=', 'b', "build directory (where to install from)"),
|
||||
('force', 'f', "force installation (overwrite existing files)"),
|
||||
('compile', 'c', "compile .py to .pyc [default]"),
|
||||
('no-compile', None, "don't compile .py files"),
|
||||
(
|
||||
'optimize=',
|
||||
'O',
|
||||
"also compile with optimization: -O1 for \"python -O\", "
|
||||
"-O2 for \"python -OO\", and -O0 to disable [default: -O0]",
|
||||
),
|
||||
('skip-build', None, "skip the build steps"),
|
||||
]
|
||||
|
||||
boolean_options = ['force', 'compile', 'skip-build']
|
||||
negative_opt = {'no-compile': 'compile'}
|
||||
|
||||
def initialize_options(self):
|
||||
# let the 'install' command dictate our installation directory
|
||||
self.install_dir = None
|
||||
self.build_dir = None
|
||||
self.force = 0
|
||||
self.compile = None
|
||||
self.optimize = None
|
||||
self.skip_build = None
|
||||
|
||||
def finalize_options(self):
|
||||
# Get all the information we need to install pure Python modules
|
||||
# from the umbrella 'install' command -- build (source) directory,
|
||||
# install (target) directory, and whether to compile .py files.
|
||||
self.set_undefined_options(
|
||||
'install',
|
||||
('build_lib', 'build_dir'),
|
||||
('install_lib', 'install_dir'),
|
||||
('force', 'force'),
|
||||
('compile', 'compile'),
|
||||
('optimize', 'optimize'),
|
||||
('skip_build', 'skip_build'),
|
||||
)
|
||||
|
||||
if self.compile is None:
|
||||
self.compile = True
|
||||
if self.optimize is None:
|
||||
self.optimize = False
|
||||
|
||||
if not isinstance(self.optimize, int):
|
||||
try:
|
||||
self.optimize = int(self.optimize)
|
||||
if self.optimize not in (0, 1, 2):
|
||||
raise AssertionError
|
||||
except (ValueError, AssertionError):
|
||||
raise DistutilsOptionError("optimize must be 0, 1, or 2")
|
||||
|
||||
def run(self):
|
||||
# Make sure we have built everything we need first
|
||||
self.build()
|
||||
|
||||
# Install everything: simply dump the entire contents of the build
|
||||
# directory to the installation directory (that's the beauty of
|
||||
# having a build directory!)
|
||||
outfiles = self.install()
|
||||
|
||||
# (Optionally) compile .py to .pyc
|
||||
if outfiles is not None and self.distribution.has_pure_modules():
|
||||
self.byte_compile(outfiles)
|
||||
|
||||
# -- Top-level worker functions ------------------------------------
|
||||
# (called from 'run()')
|
||||
|
||||
def build(self):
|
||||
if not self.skip_build:
|
||||
if self.distribution.has_pure_modules():
|
||||
self.run_command('build_py')
|
||||
if self.distribution.has_ext_modules():
|
||||
self.run_command('build_ext')
|
||||
|
||||
def install(self):
|
||||
if os.path.isdir(self.build_dir):
|
||||
outfiles = self.copy_tree(self.build_dir, self.install_dir)
|
||||
else:
|
||||
self.warn(
|
||||
"'%s' does not exist -- no Python modules to install" % self.build_dir
|
||||
)
|
||||
return
|
||||
return outfiles
|
||||
|
||||
def byte_compile(self, files):
|
||||
if sys.dont_write_bytecode:
|
||||
self.warn('byte-compiling is disabled, skipping.')
|
||||
return
|
||||
|
||||
from distutils.util import byte_compile
|
||||
|
||||
# Get the "--root" directory supplied to the "install" command,
|
||||
# and use it as a prefix to strip off the purported filename
|
||||
# encoded in bytecode files. This is far from complete, but it
|
||||
# should at least generate usable bytecode in RPM distributions.
|
||||
install_root = self.get_finalized_command('install').root
|
||||
|
||||
if self.compile:
|
||||
byte_compile(
|
||||
files,
|
||||
optimize=0,
|
||||
force=self.force,
|
||||
prefix=install_root,
|
||||
dry_run=self.dry_run,
|
||||
)
|
||||
if self.optimize > 0:
|
||||
byte_compile(
|
||||
files,
|
||||
optimize=self.optimize,
|
||||
force=self.force,
|
||||
prefix=install_root,
|
||||
verbose=self.verbose,
|
||||
dry_run=self.dry_run,
|
||||
)
|
||||
|
||||
# -- Utility methods -----------------------------------------------
|
||||
|
||||
def _mutate_outputs(self, has_any, build_cmd, cmd_option, output_dir):
|
||||
if not has_any:
|
||||
return []
|
||||
|
||||
build_cmd = self.get_finalized_command(build_cmd)
|
||||
build_files = build_cmd.get_outputs()
|
||||
build_dir = getattr(build_cmd, cmd_option)
|
||||
|
||||
prefix_len = len(build_dir) + len(os.sep)
|
||||
outputs = []
|
||||
for file in build_files:
|
||||
outputs.append(os.path.join(output_dir, file[prefix_len:]))
|
||||
|
||||
return outputs
|
||||
|
||||
def _bytecode_filenames(self, py_filenames):
|
||||
bytecode_files = []
|
||||
for py_file in py_filenames:
|
||||
# Since build_py handles package data installation, the
|
||||
# list of outputs can contain more than just .py files.
|
||||
# Make sure we only report bytecode for the .py files.
|
||||
ext = os.path.splitext(os.path.normcase(py_file))[1]
|
||||
if ext != PYTHON_SOURCE_EXTENSION:
|
||||
continue
|
||||
if self.compile:
|
||||
bytecode_files.append(
|
||||
importlib.util.cache_from_source(py_file, optimization='')
|
||||
)
|
||||
if self.optimize > 0:
|
||||
bytecode_files.append(
|
||||
importlib.util.cache_from_source(
|
||||
py_file, optimization=self.optimize
|
||||
)
|
||||
)
|
||||
|
||||
return bytecode_files
|
||||
|
||||
# -- External interface --------------------------------------------
|
||||
# (called by outsiders)
|
||||
|
||||
def get_outputs(self):
|
||||
"""Return the list of files that would be installed if this command
|
||||
were actually run. Not affected by the "dry-run" flag or whether
|
||||
modules have actually been built yet.
|
||||
"""
|
||||
pure_outputs = self._mutate_outputs(
|
||||
self.distribution.has_pure_modules(),
|
||||
'build_py',
|
||||
'build_lib',
|
||||
self.install_dir,
|
||||
)
|
||||
if self.compile:
|
||||
bytecode_outputs = self._bytecode_filenames(pure_outputs)
|
||||
else:
|
||||
bytecode_outputs = []
|
||||
|
||||
ext_outputs = self._mutate_outputs(
|
||||
self.distribution.has_ext_modules(),
|
||||
'build_ext',
|
||||
'build_lib',
|
||||
self.install_dir,
|
||||
)
|
||||
|
||||
return pure_outputs + bytecode_outputs + ext_outputs
|
||||
|
||||
def get_inputs(self):
|
||||
"""Get the list of files that are input to this command, ie. the
|
||||
files that get installed as they are named in the build tree.
|
||||
The files in this list correspond one-to-one to the output
|
||||
filenames returned by 'get_outputs()'.
|
||||
"""
|
||||
inputs = []
|
||||
|
||||
if self.distribution.has_pure_modules():
|
||||
build_py = self.get_finalized_command('build_py')
|
||||
inputs.extend(build_py.get_outputs())
|
||||
|
||||
if self.distribution.has_ext_modules():
|
||||
build_ext = self.get_finalized_command('build_ext')
|
||||
inputs.extend(build_ext.get_outputs())
|
||||
|
||||
return inputs
|
@@ -0,0 +1,61 @@
|
||||
"""distutils.command.install_scripts
|
||||
|
||||
Implements the Distutils 'install_scripts' command, for installing
|
||||
Python scripts."""
|
||||
|
||||
# contributed by Bastian Kleineidam
|
||||
|
||||
import os
|
||||
from distutils.core import Command
|
||||
from distutils import log
|
||||
from stat import ST_MODE
|
||||
|
||||
|
||||
class install_scripts(Command):
|
||||
|
||||
description = "install scripts (Python or otherwise)"
|
||||
|
||||
user_options = [
|
||||
('install-dir=', 'd', "directory to install scripts to"),
|
||||
('build-dir=', 'b', "build directory (where to install from)"),
|
||||
('force', 'f', "force installation (overwrite existing files)"),
|
||||
('skip-build', None, "skip the build steps"),
|
||||
]
|
||||
|
||||
boolean_options = ['force', 'skip-build']
|
||||
|
||||
def initialize_options(self):
|
||||
self.install_dir = None
|
||||
self.force = 0
|
||||
self.build_dir = None
|
||||
self.skip_build = None
|
||||
|
||||
def finalize_options(self):
|
||||
self.set_undefined_options('build', ('build_scripts', 'build_dir'))
|
||||
self.set_undefined_options(
|
||||
'install',
|
||||
('install_scripts', 'install_dir'),
|
||||
('force', 'force'),
|
||||
('skip_build', 'skip_build'),
|
||||
)
|
||||
|
||||
def run(self):
|
||||
if not self.skip_build:
|
||||
self.run_command('build_scripts')
|
||||
self.outfiles = self.copy_tree(self.build_dir, self.install_dir)
|
||||
if os.name == 'posix':
|
||||
# Set the executable bits (owner, group, and world) on
|
||||
# all the scripts we just installed.
|
||||
for file in self.get_outputs():
|
||||
if self.dry_run:
|
||||
log.info("changing mode of %s", file)
|
||||
else:
|
||||
mode = ((os.stat(file)[ST_MODE]) | 0o555) & 0o7777
|
||||
log.info("changing mode of %s to %o", file, mode)
|
||||
os.chmod(file, mode)
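# e.g. (editor's note): a script copied with mode 0o644 ends up 0o755 here,
# since 0o644 | 0o555 == 0o755; the final & 0o7777 only strips the file-type
# bits while preserving setuid/setgid/sticky.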
|
||||
|
||||
def get_inputs(self):
|
||||
return self.distribution.scripts or []
|
||||
|
||||
def get_outputs(self):
|
||||
return self.outfiles or []
|
@@ -0,0 +1,31 @@
|
||||
import sys
|
||||
|
||||
|
||||
def _pythonlib_compat():
|
||||
"""
|
||||
On Python 3.7 and earlier, distutils would include the Python
|
||||
library. See pypa/distutils#9.
|
||||
"""
|
||||
from distutils import sysconfig
|
||||
|
||||
if not sysconfig.get_config_var('Py_ENABLED_SHARED'):
|
||||
return
|
||||
|
||||
yield 'python{}.{}{}'.format(
|
||||
sys.hexversion >> 24,
|
||||
(sys.hexversion >> 16) & 0xFF,
|
||||
sysconfig.get_config_var('ABIFLAGS'),
|
||||
)
|
||||
|
||||
|
||||
def compose(f1, f2):
|
||||
return lambda *args, **kwargs: f1(f2(*args, **kwargs))
|
||||
|
||||
|
||||
pythonlib = (
|
||||
compose(list, _pythonlib_compat)
|
||||
if sys.version_info < (3, 8)
|
||||
and sys.platform != 'darwin'
|
||||
and sys.platform[:3] != 'aix'
|
||||
else list
|
||||
)
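# Editor's illustration (not part of the module): on a --enable-shared
# CPython 3.7 build with ABIFLAGS "m", pythonlib() evaluates to
# ['python3.7m']; on 3.8+ (or on macOS/AIX) pythonlib is plain list,
# so pythonlib() is just [].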
|
venv/Lib/site-packages/setuptools/_distutils/command/register.py
@@ -0,0 +1,319 @@
|
||||
"""distutils.command.register
|
||||
|
||||
Implements the Distutils 'register' command (register with the repository).
|
||||
"""
|
||||
|
||||
# created 2002/10/21, Richard Jones
|
||||
|
||||
import getpass
|
||||
import io
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
from warnings import warn
|
||||
|
||||
from distutils.core import PyPIRCCommand
|
||||
from distutils import log
|
||||
|
||||
|
||||
class register(PyPIRCCommand):
|
||||
|
||||
description = "register the distribution with the Python package index"
|
||||
user_options = PyPIRCCommand.user_options + [
|
||||
('list-classifiers', None, 'list the valid Trove classifiers'),
|
||||
(
|
||||
'strict',
|
||||
None,
|
||||
'stop the registration if the metadata is not fully compliant',
|
||||
),
|
||||
]
|
||||
boolean_options = PyPIRCCommand.boolean_options + [
|
||||
'verify',
|
||||
'list-classifiers',
|
||||
'strict',
|
||||
]
|
||||
|
||||
sub_commands = [('check', lambda self: True)]
|
||||
|
||||
def initialize_options(self):
|
||||
PyPIRCCommand.initialize_options(self)
|
||||
self.list_classifiers = 0
|
||||
self.strict = 0
|
||||
|
||||
def finalize_options(self):
|
||||
PyPIRCCommand.finalize_options(self)
|
||||
# setting options for the `check` subcommand
|
||||
check_options = {
|
||||
'strict': ('register', self.strict),
|
||||
'restructuredtext': ('register', 1),
|
||||
}
|
||||
self.distribution.command_options['check'] = check_options
|
||||
|
||||
def run(self):
|
||||
self.finalize_options()
|
||||
self._set_config()
|
||||
|
||||
# Run sub commands
|
||||
for cmd_name in self.get_sub_commands():
|
||||
self.run_command(cmd_name)
|
||||
|
||||
if self.dry_run:
|
||||
self.verify_metadata()
|
||||
elif self.list_classifiers:
|
||||
self.classifiers()
|
||||
else:
|
||||
self.send_metadata()
|
||||
|
||||
def check_metadata(self):
|
||||
"""Deprecated API."""
|
||||
warn(
|
||||
"distutils.command.register.check_metadata is deprecated; "
|
||||
"use the check command instead",
|
||||
DeprecationWarning,
|
||||
)
|
||||
check = self.distribution.get_command_obj('check')
|
||||
check.ensure_finalized()
|
||||
check.strict = self.strict
|
||||
check.restructuredtext = 1
|
||||
check.run()
|
||||
|
||||
def _set_config(self):
|
||||
'''Reads the configuration file and set attributes.'''
|
||||
config = self._read_pypirc()
|
||||
if config != {}:
|
||||
self.username = config['username']
|
||||
self.password = config['password']
|
||||
self.repository = config['repository']
|
||||
self.realm = config['realm']
|
||||
self.has_config = True
|
||||
else:
|
||||
if self.repository not in ('pypi', self.DEFAULT_REPOSITORY):
|
||||
raise ValueError('%s not found in .pypirc' % self.repository)
|
||||
if self.repository == 'pypi':
|
||||
self.repository = self.DEFAULT_REPOSITORY
|
||||
self.has_config = False
|
||||
|
||||
def classifiers(self):
|
||||
'''Fetch the list of classifiers from the server.'''
|
||||
url = self.repository + '?:action=list_classifiers'
|
||||
response = urllib.request.urlopen(url)
|
||||
log.info(self._read_pypi_response(response))
|
||||
|
||||
def verify_metadata(self):
|
||||
'''Send the metadata to the package index server to be checked.'''
|
||||
# send the info to the server and report the result
|
||||
(code, result) = self.post_to_server(self.build_post_data('verify'))
|
||||
log.info('Server response (%s): %s', code, result)
|
||||
|
||||
def send_metadata(self): # noqa: C901
|
||||
'''Send the metadata to the package index server.
|
||||
|
||||
Well, do the following:
|
||||
1. figure who the user is, and then
|
||||
2. send the data as a Basic auth'ed POST.
|
||||
|
||||
First we try to read the username/password from $HOME/.pypirc,
|
||||
which is a ConfigParser-formatted file with a section
|
||||
[distutils] containing username and password entries (both
|
||||
in clear text). Eg:
|
||||
|
||||
[distutils]
|
||||
index-servers =
|
||||
pypi
|
||||
|
||||
[pypi]
|
||||
username: fred
|
||||
password: sekrit
|
||||
|
||||
Otherwise, to figure who the user is, we offer the user three
|
||||
choices:
|
||||
|
||||
1. use existing login,
|
||||
2. register as a new user, or
|
||||
3. set the password to a random string and email the user.
|
||||
|
||||
'''
|
||||
# see if we can short-cut and get the username/password from the
|
||||
# config
|
||||
if self.has_config:
|
||||
choice = '1'
|
||||
username = self.username
|
||||
password = self.password
|
||||
else:
|
||||
choice = 'x'
|
||||
username = password = ''
|
||||
|
||||
# get the user's login info
|
||||
choices = '1 2 3 4'.split()
|
||||
while choice not in choices:
|
||||
self.announce(
|
||||
'''\
|
||||
We need to know who you are, so please choose either:
|
||||
1. use your existing login,
|
||||
2. register as a new user,
|
||||
3. have the server generate a new password for you (and email it to you), or
|
||||
4. quit
|
||||
Your selection [default 1]: ''',
|
||||
log.INFO,
|
||||
)
|
||||
choice = input()
|
||||
if not choice:
|
||||
choice = '1'
|
||||
elif choice not in choices:
|
||||
print('Please choose one of the four options!')
|
||||
|
||||
if choice == '1':
|
||||
# get the username and password
|
||||
while not username:
|
||||
username = input('Username: ')
|
||||
while not password:
|
||||
password = getpass.getpass('Password: ')
|
||||
|
||||
# set up the authentication
|
||||
auth = urllib.request.HTTPPasswordMgr()
|
||||
host = urllib.parse.urlparse(self.repository)[1]
|
||||
auth.add_password(self.realm, host, username, password)
|
||||
# send the info to the server and report the result
|
||||
code, result = self.post_to_server(self.build_post_data('submit'), auth)
|
||||
self.announce('Server response ({}): {}'.format(code, result), log.INFO)
|
||||
|
||||
# possibly save the login
|
||||
if code == 200:
|
||||
if self.has_config:
|
||||
# sharing the password in the distribution instance
|
||||
# so the upload command can reuse it
|
||||
self.distribution.password = password
|
||||
else:
|
||||
self.announce(
|
||||
(
|
||||
'I can store your PyPI login so future '
|
||||
'submissions will be faster.'
|
||||
),
|
||||
log.INFO,
|
||||
)
|
||||
self.announce(
|
||||
'(the login will be stored in %s)' % self._get_rc_file(),
|
||||
log.INFO,
|
||||
)
|
||||
choice = 'X'
|
||||
while choice.lower() not in 'yn':
|
||||
choice = input('Save your login (y/N)?')
|
||||
if not choice:
|
||||
choice = 'n'
|
||||
if choice.lower() == 'y':
|
||||
self._store_pypirc(username, password)
|
||||
|
||||
elif choice == '2':
|
||||
data = {':action': 'user'}
|
||||
data['name'] = data['password'] = data['email'] = ''
|
||||
data['confirm'] = None
|
||||
while not data['name']:
|
||||
data['name'] = input('Username: ')
|
||||
while data['password'] != data['confirm']:
|
||||
while not data['password']:
|
||||
data['password'] = getpass.getpass('Password: ')
|
||||
while not data['confirm']:
|
||||
data['confirm'] = getpass.getpass(' Confirm: ')
|
||||
if data['password'] != data['confirm']:
|
||||
data['password'] = ''
|
||||
data['confirm'] = None
|
||||
print("Password and confirm don't match!")
|
||||
while not data['email']:
|
||||
data['email'] = input(' EMail: ')
|
||||
code, result = self.post_to_server(data)
|
||||
if code != 200:
|
||||
log.info('Server response (%s): %s', code, result)
|
||||
else:
|
||||
log.info('You will receive an email shortly.')
|
||||
log.info('Follow the instructions in it to complete registration.')
|
||||
elif choice == '3':
|
||||
data = {':action': 'password_reset'}
|
||||
data['email'] = ''
|
||||
while not data['email']:
|
||||
data['email'] = input('Your email address: ')
|
||||
code, result = self.post_to_server(data)
|
||||
log.info('Server response (%s): %s', code, result)
|
||||
|
||||
def build_post_data(self, action):
|
||||
# figure the data to send - the metadata plus some additional
|
||||
# information used by the package server
|
||||
meta = self.distribution.metadata
|
||||
data = {
|
||||
':action': action,
|
||||
'metadata_version': '1.0',
|
||||
'name': meta.get_name(),
|
||||
'version': meta.get_version(),
|
||||
'summary': meta.get_description(),
|
||||
'home_page': meta.get_url(),
|
||||
'author': meta.get_contact(),
|
||||
'author_email': meta.get_contact_email(),
|
||||
'license': meta.get_licence(),
|
||||
'description': meta.get_long_description(),
|
||||
'keywords': meta.get_keywords(),
|
||||
'platform': meta.get_platforms(),
|
||||
'classifiers': meta.get_classifiers(),
|
||||
'download_url': meta.get_download_url(),
|
||||
# PEP 314
|
||||
'provides': meta.get_provides(),
|
||||
'requires': meta.get_requires(),
|
||||
'obsoletes': meta.get_obsoletes(),
|
||||
}
|
||||
if data['provides'] or data['requires'] or data['obsoletes']:
|
||||
data['metadata_version'] = '1.1'
|
||||
return data
|
||||
|
||||
def post_to_server(self, data, auth=None): # noqa: C901
|
||||
'''Post a query to the server, and return a string response.'''
|
||||
if 'name' in data:
|
||||
self.announce(
|
||||
'Registering {} to {}'.format(data['name'], self.repository), log.INFO
|
||||
)
|
||||
# Build up the MIME payload for the urllib2 POST data
|
||||
boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
|
||||
sep_boundary = '\n--' + boundary
|
||||
end_boundary = sep_boundary + '--'
|
||||
body = io.StringIO()
|
||||
for key, value in data.items():
|
||||
# handle multiple entries for the same name
|
||||
if type(value) not in (type([]), type(())):
|
||||
value = [value]
|
||||
for value in value:
|
||||
value = str(value)
|
||||
body.write(sep_boundary)
|
||||
body.write('\nContent-Disposition: form-data; name="%s"' % key)
|
||||
body.write("\n\n")
|
||||
body.write(value)
|
||||
if value and value[-1] == '\r':
|
||||
body.write('\n') # write an extra newline (lurve Macs)
|
||||
body.write(end_boundary)
|
||||
body.write("\n")
|
||||
body = body.getvalue().encode("utf-8")
|
||||
|
||||
# build the Request
|
||||
headers = {
|
||||
'Content-type': 'multipart/form-data; boundary=%s; charset=utf-8'
|
||||
% boundary,
|
||||
'Content-length': str(len(body)),
|
||||
}
|
||||
req = urllib.request.Request(self.repository, body, headers)
|
||||
|
||||
# handle HTTP and include the Basic Auth handler
|
||||
opener = urllib.request.build_opener(
|
||||
urllib.request.HTTPBasicAuthHandler(password_mgr=auth)
|
||||
)
|
||||
data = ''
|
||||
try:
|
||||
result = opener.open(req)
|
||||
except urllib.error.HTTPError as e:
|
||||
if self.show_response:
|
||||
data = e.fp.read()
|
||||
result = e.code, e.msg
|
||||
except urllib.error.URLError as e:
|
||||
result = 500, str(e)
|
||||
else:
|
||||
if self.show_response:
|
||||
data = self._read_pypi_response(result)
|
||||
result = 200, 'OK'
|
||||
if self.show_response:
|
||||
msg = '\n'.join(('-' * 75, data, '-' * 75))
|
||||
self.announce(msg, log.INFO)
|
||||
return result
|
venv/Lib/site-packages/setuptools/_distutils/command/sdist.py
(file diff suppressed because it is too large)
venv/Lib/site-packages/setuptools/_distutils/command/upload.py
@@ -0,0 +1,205 @@
|
||||
"""
|
||||
distutils.command.upload
|
||||
|
||||
Implements the Distutils 'upload' subcommand (upload package to a package
|
||||
index).
|
||||
"""
|
||||
|
||||
import os
|
||||
import io
|
||||
import hashlib
|
||||
from base64 import standard_b64encode
|
||||
from urllib.request import urlopen, Request, HTTPError
|
||||
from urllib.parse import urlparse
|
||||
from distutils.errors import DistutilsError, DistutilsOptionError
|
||||
from distutils.core import PyPIRCCommand
|
||||
from distutils.spawn import spawn
|
||||
from distutils import log
|
||||
|
||||
|
||||
# PyPI Warehouse supports MD5, SHA256, and Blake2 (blake2-256)
|
||||
# https://bugs.python.org/issue40698
|
||||
_FILE_CONTENT_DIGESTS = {
|
||||
"md5_digest": getattr(hashlib, "md5", None),
|
||||
"sha256_digest": getattr(hashlib, "sha256", None),
|
||||
"blake2_256_digest": getattr(hashlib, "blake2b", None),
|
||||
}
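# For a given file's bytes `content`, upload_file() below fills in e.g.
#   data["sha256_digest"] = hashlib.sha256(content).hexdigest()
# and simply skips any constructor that is missing or rejected by the
# security policy (editor's note, mirroring the loop further down).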
|
||||
|
||||
|
||||
class upload(PyPIRCCommand):
|
||||
|
||||
description = "upload binary package to PyPI"
|
||||
|
||||
user_options = PyPIRCCommand.user_options + [
|
||||
('sign', 's', 'sign files to upload using gpg'),
|
||||
('identity=', 'i', 'GPG identity used to sign files'),
|
||||
]
|
||||
|
||||
boolean_options = PyPIRCCommand.boolean_options + ['sign']
|
||||
|
||||
def initialize_options(self):
|
||||
PyPIRCCommand.initialize_options(self)
|
||||
self.username = ''
|
||||
self.password = ''
|
||||
self.show_response = 0
|
||||
self.sign = False
|
||||
self.identity = None
|
||||
|
||||
def finalize_options(self):
|
||||
PyPIRCCommand.finalize_options(self)
|
||||
if self.identity and not self.sign:
|
||||
raise DistutilsOptionError("Must use --sign for --identity to have meaning")
|
||||
config = self._read_pypirc()
|
||||
if config != {}:
|
||||
self.username = config['username']
|
||||
self.password = config['password']
|
||||
self.repository = config['repository']
|
||||
self.realm = config['realm']
|
||||
|
||||
# getting the password from the distribution
|
||||
# if previously set by the register command
|
||||
if not self.password and self.distribution.password:
|
||||
self.password = self.distribution.password
|
||||
|
||||
def run(self):
|
||||
if not self.distribution.dist_files:
|
||||
msg = (
|
||||
"Must create and upload files in one command "
|
||||
"(e.g. setup.py sdist upload)"
|
||||
)
|
||||
raise DistutilsOptionError(msg)
|
||||
for command, pyversion, filename in self.distribution.dist_files:
|
||||
self.upload_file(command, pyversion, filename)
|
||||
|
||||
def upload_file(self, command, pyversion, filename): # noqa: C901
|
||||
# Makes sure the repository URL is compliant
|
||||
schema, netloc, url, params, query, fragments = urlparse(self.repository)
|
||||
if params or query or fragments:
|
||||
raise AssertionError("Incompatible url %s" % self.repository)
|
||||
|
||||
if schema not in ('http', 'https'):
|
||||
raise AssertionError("unsupported schema " + schema)
|
||||
|
||||
# Sign if requested
|
||||
if self.sign:
|
||||
gpg_args = ["gpg", "--detach-sign", "-a", filename]
|
||||
if self.identity:
|
||||
gpg_args[2:2] = ["--local-user", self.identity]
|
||||
spawn(gpg_args, dry_run=self.dry_run)
|
||||
|
||||
# Fill in the data - send all the meta-data in case we need to
|
||||
# register a new release
|
||||
with open(filename, 'rb') as f:
    content = f.read()
|
||||
|
||||
meta = self.distribution.metadata
|
||||
data = {
|
||||
# action
|
||||
':action': 'file_upload',
|
||||
'protocol_version': '1',
|
||||
# identify release
|
||||
'name': meta.get_name(),
|
||||
'version': meta.get_version(),
|
||||
# file content
|
||||
'content': (os.path.basename(filename), content),
|
||||
'filetype': command,
|
||||
'pyversion': pyversion,
|
||||
# additional meta-data
|
||||
'metadata_version': '1.0',
|
||||
'summary': meta.get_description(),
|
||||
'home_page': meta.get_url(),
|
||||
'author': meta.get_contact(),
|
||||
'author_email': meta.get_contact_email(),
|
||||
'license': meta.get_licence(),
|
||||
'description': meta.get_long_description(),
|
||||
'keywords': meta.get_keywords(),
|
||||
'platform': meta.get_platforms(),
|
||||
'classifiers': meta.get_classifiers(),
|
||||
'download_url': meta.get_download_url(),
|
||||
# PEP 314
|
||||
'provides': meta.get_provides(),
|
||||
'requires': meta.get_requires(),
|
||||
'obsoletes': meta.get_obsoletes(),
|
||||
}
|
||||
|
||||
data['comment'] = ''
|
||||
|
||||
# file content digests
|
||||
for digest_name, digest_cons in _FILE_CONTENT_DIGESTS.items():
|
||||
if digest_cons is None:
|
||||
continue
|
||||
try:
|
||||
data[digest_name] = digest_cons(content).hexdigest()
|
||||
except ValueError:
|
||||
# hash digest not available or blocked by security policy
|
||||
pass
|
||||
|
||||
if self.sign:
|
||||
with open(filename + ".asc", "rb") as f:
|
||||
data['gpg_signature'] = (os.path.basename(filename) + ".asc", f.read())
|
||||
|
||||
# set up the authentication
|
||||
user_pass = (self.username + ":" + self.password).encode('ascii')
|
||||
# The exact encoding of the authentication string is debated.
|
||||
# In any case, PyPI only accepts ASCII for both username and password.
|
||||
auth = "Basic " + standard_b64encode(user_pass).decode('ascii')
|
||||
|
||||
# Build up the MIME payload for the POST data
|
||||
boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
|
||||
sep_boundary = b'\r\n--' + boundary.encode('ascii')
|
||||
end_boundary = sep_boundary + b'--\r\n'
|
||||
body = io.BytesIO()
|
||||
for key, value in data.items():
|
||||
title = '\r\nContent-Disposition: form-data; name="%s"' % key
|
||||
# handle multiple entries for the same name
|
||||
if not isinstance(value, list):
|
||||
value = [value]
|
||||
for value in value:
|
||||
if type(value) is tuple:
|
||||
title += '; filename="%s"' % value[0]
|
||||
value = value[1]
|
||||
else:
|
||||
value = str(value).encode('utf-8')
|
||||
body.write(sep_boundary)
|
||||
body.write(title.encode('utf-8'))
|
||||
body.write(b"\r\n\r\n")
|
||||
body.write(value)
|
||||
body.write(end_boundary)
|
||||
body = body.getvalue()
|
||||
|
||||
msg = "Submitting {} to {}".format(filename, self.repository)
|
||||
self.announce(msg, log.INFO)
|
||||
|
||||
# build the Request
|
||||
headers = {
|
||||
'Content-type': 'multipart/form-data; boundary=%s' % boundary,
|
||||
'Content-length': str(len(body)),
|
||||
'Authorization': auth,
|
||||
}
|
||||
|
||||
request = Request(self.repository, data=body, headers=headers)
|
||||
# send the data
|
||||
try:
|
||||
result = urlopen(request)
|
||||
status = result.getcode()
|
||||
reason = result.msg
|
||||
except HTTPError as e:
|
||||
status = e.code
|
||||
reason = e.msg
|
||||
except OSError as e:
|
||||
self.announce(str(e), log.ERROR)
|
||||
raise
|
||||
|
||||
if status == 200:
|
||||
self.announce('Server response ({}): {}'.format(status, reason), log.INFO)
|
||||
if self.show_response:
|
||||
text = self._read_pypi_response(result)
|
||||
msg = '\n'.join(('-' * 75, text, '-' * 75))
|
||||
self.announce(msg, log.INFO)
|
||||
else:
|
||||
msg = 'Upload failed ({}): {}'.format(status, reason)
|
||||
self.announce(msg, log.ERROR)
|
||||
raise DistutilsError(msg)
|