pip/src/pip/_internal/operations/install/wheel.py

"""Support for installing and building the "wheel" binary package format.
"""
from __future__ import absolute_import
import collections
import compileall
import contextlib
import csv
import io
import logging
import os.path
import re
import shutil
import stat
import sys
import warnings
from base64 import urlsafe_b64encode
from itertools import starmap
from zipfile import ZipFile
from pip._vendor import pkg_resources
from pip._vendor.distlib.scripts import ScriptMaker
from pip._vendor.distlib.util import get_export_entry
from pip._vendor.six import PY2, ensure_str, ensure_text, itervalues, text_type
from pip._internal.exceptions import InstallationError
from pip._internal.locations import get_major_minor_version
from pip._internal.models.direct_url import DIRECT_URL_METADATA_NAME, DirectUrl
from pip._internal.utils.filesystem import adjacent_tmp_file, replace
from pip._internal.utils.misc import captured_stdout, ensure_dir, hash_file
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.unpacking import current_umask, unpack_file
from pip._internal.utils.wheel import parse_wheel
# Use the custom cast function at runtime to make cast work,
# and import typing.cast when performing pre-commit and type
# checks
if not MYPY_CHECK_RUNNING:
from pip._internal.utils.typing import cast
else:
from email.message import Message
from typing import (
Any,
Callable,
Dict,
IO,
Iterable,
Iterator,
List,
NewType,
Optional,
Sequence,
Set,
Tuple,
Union,
cast,
)
from pip._internal.models.scheme import Scheme
from pip._internal.utils.filesystem import NamedTemporaryFileResult
RecordPath = NewType('RecordPath', text_type)
InstalledCSVRow = Tuple[RecordPath, str, Union[int, str]]
logger = logging.getLogger(__name__)
def rehash(path, blocksize=1 << 20):
# type: (text_type, int) -> Tuple[str, str]
"""Return (encoded_digest, length) for path using hashlib.sha256()"""
h, length = hash_file(path, blocksize)
digest = 'sha256=' + urlsafe_b64encode(
h.digest()
).decode('latin1').rstrip('=')
# unicode/str python2 issues
return (digest, str(length)) # type: ignore
def csv_io_kwargs(mode):
# type: (str) -> Dict[str, Any]
"""Return keyword arguments to properly open a CSV file
in the given mode.
"""
if PY2:
return {'mode': '{}b'.format(mode)}
else:
return {'mode': mode, 'newline': '', 'encoding': 'utf-8'}
def fix_script(path):
# type: (text_type) -> Optional[bool]
"""Replace #!python with #!/path/to/python
Return True if file was changed.
"""
# XXX RECORD hashes will need to be updated
if not os.path.isfile(path):
return None
with open(path, 'rb') as script:
firstline = script.readline()
if not firstline.startswith(b'#!python'):
return False
exename = sys.executable.encode(sys.getfilesystemencoding())
firstline = b'#!' + exename + os.linesep.encode("ascii")
rest = script.read()
with open(path, 'wb') as script:
script.write(firstline)
script.write(rest)
return True
def wheel_root_is_purelib(metadata):
# type: (Message) -> bool
return metadata.get("Root-Is-Purelib", "").lower() == "true"
def get_entrypoints(filename):
# type: (str) -> Tuple[Dict[str, str], Dict[str, str]]
if not os.path.exists(filename):
return {}, {}
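    # An entry_points.txt file looks roughly like:
    #
    #   [console_scripts]
    #   pip = pip._internal.cli.main:main
    #
    # It is parsed below into two dicts mapping script name to
    # "module:function", one for console scripts and one for GUI scripts.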
# This is done because you can pass a string to entry_points wrappers which
# means that they may or may not be valid INI files. The attempt here is to
# strip leading and trailing whitespace in order to make them valid INI
# files.
with io.open(filename, encoding="utf-8") as fp:
data = io.StringIO()
for line in fp:
data.write(line.strip())
data.write(u"\n")
data.seek(0)
# get the entry points and then the script names
entry_points = pkg_resources.EntryPoint.parse_map(data)
console = entry_points.get('console_scripts', {})
gui = entry_points.get('gui_scripts', {})
def _split_ep(s):
# type: (pkg_resources.EntryPoint) -> Tuple[str, str]
"""get the string representation of EntryPoint,
remove space and split on '='
"""
split_parts = str(s).replace(" ", "").split("=")
return split_parts[0], split_parts[1]
# convert the EntryPoint objects into strings with module:function
console = dict(_split_ep(v) for v in console.values())
gui = dict(_split_ep(v) for v in gui.values())
return console, gui
def message_about_scripts_not_on_PATH(scripts):
# type: (Sequence[str]) -> Optional[str]
"""Determine if any scripts are not on PATH and format a warning.
Returns a warning message if one or more scripts are not on PATH,
otherwise None.
"""
if not scripts:
return None
# Group scripts by the path they were installed in
grouped_by_dir = collections.defaultdict(set) # type: Dict[str, Set[str]]
for destfile in scripts:
parent_dir = os.path.dirname(destfile)
script_name = os.path.basename(destfile)
grouped_by_dir[parent_dir].add(script_name)
# We don't want to warn for directories that are on PATH.
not_warn_dirs = [
os.path.normcase(i).rstrip(os.sep) for i in
os.environ.get("PATH", "").split(os.pathsep)
]
    # If an executable sits in the same directory as sys.executable, we don't
    # warn for it. This covers the case of venv invocations without
    # activating the venv.
not_warn_dirs.append(os.path.normcase(os.path.dirname(sys.executable)))
warn_for = {
parent_dir: scripts for parent_dir, scripts in grouped_by_dir.items()
if os.path.normcase(parent_dir) not in not_warn_dirs
} # type: Dict[str, Set[str]]
if not warn_for:
return None
# Format a message
msg_lines = []
for parent_dir, dir_scripts in warn_for.items():
sorted_scripts = sorted(dir_scripts) # type: List[str]
if len(sorted_scripts) == 1:
start_text = "script {} is".format(sorted_scripts[0])
else:
start_text = "scripts {} are".format(
", ".join(sorted_scripts[:-1]) + " and " + sorted_scripts[-1]
)
msg_lines.append(
"The {} installed in '{}' which is not on PATH."
.format(start_text, parent_dir)
)
last_line_fmt = (
"Consider adding {} to PATH or, if you prefer "
"to suppress this warning, use --no-warn-script-location."
)
if len(msg_lines) == 1:
msg_lines.append(last_line_fmt.format("this directory"))
else:
msg_lines.append(last_line_fmt.format("these directories"))
# Add a note if any directory starts with ~
warn_for_tilde = any(
i[0] == "~" for i in os.environ.get("PATH", "").split(os.pathsep) if i
)
if warn_for_tilde:
tilde_warning_msg = (
"NOTE: The current PATH contains path(s) starting with `~`, "
"which may not be expanded by all applications."
)
msg_lines.append(tilde_warning_msg)
# Returns the formatted multiline message
return "\n".join(msg_lines)
def _normalized_outrows(outrows):
# type: (Iterable[InstalledCSVRow]) -> List[Tuple[str, str, str]]
"""Normalize the given rows of a RECORD file.
Items in each row are converted into str. Rows are then sorted to make
the value more predictable for tests.
Each row is a 3-tuple (path, hash, size) and corresponds to a record of
a RECORD file (see PEP 376 and PEP 427 for details). For the rows
passed to this function, the size can be an integer as an int or string,
or the empty string.
"""
# Normally, there should only be one row per path, in which case the
# second and third elements don't come into play when sorting.
# However, in cases in the wild where a path might happen to occur twice,
# we don't want the sort operation to trigger an error (but still want
# determinism). Since the third element can be an int or string, we
# coerce each element to a string to avoid a TypeError in this case.
# For additional background, see--
# https://github.com/pypa/pip/issues/5868
return sorted(
(ensure_str(record_path, encoding='utf-8'), hash_, str(size))
for record_path, hash_, size in outrows
)
def _record_to_fs_path(record_path):
# type: (RecordPath) -> text_type
return record_path
def _fs_to_record_path(path, relative_to=None):
# type: (text_type, Optional[text_type]) -> RecordPath
if relative_to is not None:
# On Windows, do not handle relative paths if they belong to different
# logical disks
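        # e.g. (illustrative) a path on C: relative to a lib dir on C: becomes
        # a relative RECORD path, while a path on D: stays absolute.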
if os.path.splitdrive(path)[0].lower() == \
os.path.splitdrive(relative_to)[0].lower():
path = os.path.relpath(path, relative_to)
path = path.replace(os.path.sep, '/')
return cast('RecordPath', path)
def _parse_record_path(record_column):
# type: (str) -> RecordPath
p = ensure_text(record_column, encoding='utf-8')
return cast('RecordPath', p)
def get_csv_rows_for_installed(
old_csv_rows, # type: Iterable[List[str]]
installed, # type: Dict[RecordPath, RecordPath]
changed, # type: Set[RecordPath]
generated, # type: List[str]
lib_dir, # type: str
):
# type: (...) -> List[InstalledCSVRow]
"""
:param installed: A map from archive RECORD path to installation RECORD
path.
"""
installed_rows = [] # type: List[InstalledCSVRow]
for row in old_csv_rows:
if len(row) > 3:
logger.warning(
'RECORD line has more than three elements: {}'.format(row)
)
old_record_path = _parse_record_path(row[0])
new_record_path = installed.pop(old_record_path, old_record_path)
if new_record_path in changed:
digest, length = rehash(_record_to_fs_path(new_record_path))
else:
digest = row[1] if len(row) > 1 else ''
length = row[2] if len(row) > 2 else ''
installed_rows.append((new_record_path, digest, length))
for f in generated:
path = _fs_to_record_path(f, lib_dir)
digest, length = rehash(f)
installed_rows.append((path, digest, length))
for installed_record_path in itervalues(installed):
installed_rows.append((installed_record_path, '', ''))
return installed_rows
class MissingCallableSuffix(Exception):
pass
def _raise_for_invalid_entrypoint(specification):
# type: (str) -> None
entry = get_export_entry(specification)
if entry is not None and entry.suffix is None:
raise MissingCallableSuffix(str(entry))
class PipScriptMaker(ScriptMaker):
def make(self, specification, options=None):
# type: (str, Dict[str, Any]) -> List[str]
_raise_for_invalid_entrypoint(specification)
return super(PipScriptMaker, self).make(specification, options)
def install_unpacked_wheel(
name, # type: str
wheeldir, # type: str
wheel_zip, # type: ZipFile
scheme, # type: Scheme
req_description, # type: str
pycompile=True, # type: bool
warn_script_location=True, # type: bool
direct_url=None, # type: Optional[DirectUrl]
requested=False, # type: bool
):
# type: (...) -> None
"""Install a wheel.
:param name: Name of the project to install
:param wheeldir: Base directory of the unpacked wheel
:param wheel_zip: open ZipFile for wheel being installed
:param scheme: Distutils scheme dictating the install directories
:param req_description: String used in place of the requirement, for
logging
:param pycompile: Whether to byte-compile installed Python files
:param warn_script_location: Whether to check that scripts are installed
into a directory on PATH
:raises UnsupportedWheel:
* when the directory holds an unpacked wheel with incompatible
Wheel-Version
* when the .dist-info dir does not match the wheel
"""
# TODO: Investigate and break this up.
# TODO: Look into moving this into a dedicated class for representing an
# installation.
source = wheeldir.rstrip(os.path.sep) + os.path.sep
info_dir, metadata = parse_wheel(wheel_zip, name)
if wheel_root_is_purelib(metadata):
lib_dir = scheme.purelib
else:
lib_dir = scheme.platlib
subdirs = os.listdir(source)
data_dirs = [s for s in subdirs if s.endswith('.data')]
# Record details of the files moved
# installed = files copied from the wheel to the destination
# changed = files changed while installing (scripts #! line typically)
# generated = files newly generated during the install (script wrappers)
installed = {} # type: Dict[RecordPath, RecordPath]
changed = set() # type: Set[RecordPath]
generated = [] # type: List[str]
# Compile all of the pyc files that we're going to be installing
if pycompile:
with captured_stdout() as stdout:
with warnings.catch_warnings():
warnings.filterwarnings('ignore')
compileall.compile_dir(source, force=True, quiet=True)
logger.debug(stdout.getvalue())
def record_installed(srcfile, destfile, modified=False):
# type: (text_type, text_type, bool) -> None
"""Map archive RECORD paths to installation RECORD paths."""
oldpath = _fs_to_record_path(srcfile, wheeldir)
newpath = _fs_to_record_path(destfile, lib_dir)
installed[oldpath] = newpath
if modified:
changed.add(_fs_to_record_path(destfile))
def clobber(
source, # type: text_type
dest, # type: text_type
is_base, # type: bool
fixer=None, # type: Optional[Callable[[text_type], Any]]
filter=None # type: Optional[Callable[[text_type], bool]]
):
# type: (...) -> None
ensure_dir(dest) # common for the 'include' path
for dir, subdirs, files in os.walk(source):
basedir = dir[len(source):].lstrip(os.path.sep)
destdir = os.path.join(dest, basedir)
if is_base and basedir == '':
subdirs[:] = [s for s in subdirs if not s.endswith('.data')]
for f in files:
# Skip unwanted files
if filter and filter(f):
continue
srcfile = os.path.join(dir, f)
destfile = os.path.join(dest, basedir, f)
# directory creation is lazy and after the file filtering above
# to ensure we don't install empty dirs; empty dirs can't be
# uninstalled.
ensure_dir(destdir)
# copyfile (called below) truncates the destination if it
# exists and then writes the new contents. This is fine in most
# cases, but can cause a segfault if pip has loaded a shared
# object (e.g. from pyopenssl through its vendored urllib3)
# Since the shared object is mmap'd an attempt to call a
# symbol in it will then cause a segfault. Unlinking the file
# allows writing of new contents while allowing the process to
# continue to use the old copy.
if os.path.exists(destfile):
os.unlink(destfile)
# We use copyfile (not move, copy, or copy2) to be extra sure
# that we are not moving directories over (copyfile fails for
# directories) as well as to ensure that we are not copying
# over any metadata because we want more control over what
# metadata we actually copy over.
shutil.copyfile(srcfile, destfile)
# Copy over the metadata for the file, currently this only
# includes the atime and mtime.
st = os.stat(srcfile)
if hasattr(os, "utime"):
os.utime(destfile, (st.st_atime, st.st_mtime))
# If our file is executable, then make our destination file
# executable.
if os.access(srcfile, os.X_OK):
st = os.stat(srcfile)
permissions = (
st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
)
os.chmod(destfile, permissions)
changed = False
if fixer:
changed = fixer(destfile)
record_installed(srcfile, destfile, changed)
clobber(
ensure_text(source, encoding=sys.getfilesystemencoding()),
ensure_text(lib_dir, encoding=sys.getfilesystemencoding()),
True,
)
dest_info_dir = os.path.join(lib_dir, info_dir)
# Get the defined entry points
ep_file = os.path.join(dest_info_dir, 'entry_points.txt')
console, gui = get_entrypoints(ep_file)
def is_entrypoint_wrapper(name):
# type: (text_type) -> bool
# EP, EP.exe and EP-script.py are scripts generated for
# entry point EP by setuptools
if name.lower().endswith('.exe'):
matchname = name[:-4]
elif name.lower().endswith('-script.py'):
matchname = name[:-10]
elif name.lower().endswith(".pya"):
matchname = name[:-4]
else:
matchname = name
# Ignore setuptools-generated scripts
return (matchname in console or matchname in gui)
for datadir in data_dirs:
fixer = None
filter = None
for subdir in os.listdir(os.path.join(wheeldir, datadir)):
fixer = None
if subdir == 'scripts':
fixer = fix_script
filter = is_entrypoint_wrapper
source = os.path.join(wheeldir, datadir, subdir)
dest = getattr(scheme, subdir)
clobber(
ensure_text(source, encoding=sys.getfilesystemencoding()),
ensure_text(dest, encoding=sys.getfilesystemencoding()),
False,
fixer=fixer,
filter=filter,
)
maker = PipScriptMaker(None, scheme.scripts)
# Ensure old scripts are overwritten.
# See https://github.com/pypa/pip/issues/1800
maker.clobber = True
# Ensure we don't generate any variants for scripts because this is almost
# never what somebody wants.
# See https://bitbucket.org/pypa/distlib/issue/35/
maker.variants = {''}
# This is required because otherwise distlib creates scripts that are not
# executable.
# See https://bitbucket.org/pypa/distlib/issue/32/
maker.set_mode = True
scripts_to_generate = []
# Special case pip and setuptools to generate versioned wrappers
#
# The issue is that some projects (specifically, pip and setuptools) use
# code in setup.py to create "versioned" entry points - pip2.7 on Python
# 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
# the wheel metadata at build time, and so if the wheel is installed with
# a *different* version of Python the entry points will be wrong. The
# correct fix for this is to enhance the metadata to be able to describe
# such versioned entry points, but that won't happen till Metadata 2.0 is
# available.
# In the meantime, projects using versioned entry points will either have
# incorrect versioned entry points, or they will not be able to distribute
# "universal" wheels (i.e., they will need a wheel per Python version).
#
# Because setuptools and pip are bundled with _ensurepip and virtualenv,
# we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
# override the versioned entry points in the wheel and generate the
# correct ones. This code is purely a short-term measure until Metadata 2.0
# is available.
#
    # To add to the level of hack in this section of code, in order to support
    # ensurepip this code looks for an ``ENSUREPIP_OPTIONS`` environment
    # variable, which controls which versioned scripts get installed.
#
# ENSUREPIP_OPTIONS=altinstall
# - Only pipX.Y and easy_install-X.Y will be generated and installed
# ENSUREPIP_OPTIONS=install
    #   - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note
    #     that this applies whenever ENSUREPIP_OPTIONS is set to anything
    #     other than "altinstall"
# DEFAULT
# - The default behavior is to install pip, pipX, pipX.Y, easy_install
# and easy_install-X.Y.
pip_script = console.pop('pip', None)
if pip_script:
if "ENSUREPIP_OPTIONS" not in os.environ:
scripts_to_generate.append('pip = ' + pip_script)
if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
scripts_to_generate.append(
'pip{} = {}'.format(sys.version_info[0], pip_script)
)
scripts_to_generate.append(
'pip{} = {}'.format(get_major_minor_version(), pip_script)
)
# Delete any other versioned pip entry points
pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
for k in pip_ep:
del console[k]
easy_install_script = console.pop('easy_install', None)
if easy_install_script:
if "ENSUREPIP_OPTIONS" not in os.environ:
scripts_to_generate.append(
'easy_install = ' + easy_install_script
)
scripts_to_generate.append(
'easy_install-{} = {}'.format(
get_major_minor_version(), easy_install_script
)
)
# Delete any other versioned easy_install entry points
easy_install_ep = [
k for k in console if re.match(r'easy_install(-\d\.\d)?$', k)
]
for k in easy_install_ep:
del console[k]
# Generate the console and GUI entry points specified in the wheel
scripts_to_generate.extend(starmap('{} = {}'.format, console.items()))
gui_scripts_to_generate = list(starmap('{} = {}'.format, gui.items()))
generated_console_scripts = [] # type: List[str]
try:
generated_console_scripts = maker.make_multiple(scripts_to_generate)
generated.extend(generated_console_scripts)
generated.extend(
maker.make_multiple(gui_scripts_to_generate, {'gui': True})
)
except MissingCallableSuffix as e:
entry = e.args[0]
raise InstallationError(
"Invalid script entry point: {} for req: {} - A callable "
"suffix is required. Cf https://packaging.python.org/"
"specifications/entry-points/#use-for-scripts for more "
"information.".format(entry, req_description)
)
if warn_script_location:
msg = message_about_scripts_not_on_PATH(generated_console_scripts)
if msg is not None:
logger.warning(msg)
generated_file_mode = 0o666 & ~current_umask()
@contextlib.contextmanager
def _generate_file(path, **kwargs):
# type: (str, **Any) -> Iterator[NamedTemporaryFileResult]
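        # Write to a temporary file adjacent to `path`, adjust its mode to
        # respect the current umask, then atomically replace the target so a
        # partially written metadata file is never left at `path`.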
with adjacent_tmp_file(path, **kwargs) as f:
yield f
os.chmod(f.name, generated_file_mode)
replace(f.name, path)
# Record pip as the installer
installer_path = os.path.join(dest_info_dir, 'INSTALLER')
with _generate_file(installer_path) as installer_file:
installer_file.write(b'pip\n')
generated.append(installer_path)
# Record the PEP 610 direct URL reference
if direct_url is not None:
direct_url_path = os.path.join(dest_info_dir, DIRECT_URL_METADATA_NAME)
with _generate_file(direct_url_path) as direct_url_file:
direct_url_file.write(direct_url.to_json().encode("utf-8"))
generated.append(direct_url_path)
# Record the REQUESTED file
if requested:
requested_path = os.path.join(dest_info_dir, 'REQUESTED')
with open(requested_path, "w"):
pass
generated.append(requested_path)
# Record details of all files installed
record_path = os.path.join(dest_info_dir, 'RECORD')
with open(record_path, **csv_io_kwargs('r')) as record_file:
rows = get_csv_rows_for_installed(
csv.reader(record_file),
installed=installed,
changed=changed,
generated=generated,
lib_dir=lib_dir)
with _generate_file(record_path, **csv_io_kwargs('w')) as record_file:
# The type mypy infers for record_file is different for Python 3
# (typing.IO[Any]) and Python 2 (typing.BinaryIO). We explicitly
# cast to typing.IO[str] as a workaround.
writer = csv.writer(cast('IO[str]', record_file))
writer.writerows(_normalized_outrows(rows))
def install_wheel(
name, # type: str
wheel_path, # type: str
scheme, # type: Scheme
req_description, # type: str
pycompile=True, # type: bool
warn_script_location=True, # type: bool
_temp_dir_for_testing=None, # type: Optional[str]
direct_url=None, # type: Optional[DirectUrl]
requested=False, # type: bool
):
# type: (...) -> None
with TempDirectory(
path=_temp_dir_for_testing, kind="unpacked-wheel"
) as unpacked_dir, ZipFile(wheel_path, allowZip64=True) as z:
unpack_file(wheel_path, unpacked_dir.path)
install_unpacked_wheel(
name=name,
wheeldir=unpacked_dir.path,
wheel_zip=z,
scheme=scheme,
req_description=req_description,
pycompile=pycompile,
warn_script_location=warn_script_location,
direct_url=direct_url,
requested=requested,
)