2019-10-20 15:14:50 +02:00
|
|
|
"""Support for installing and building the "wheel" binary package format.
|
2012-10-02 07:50:24 +02:00
|
|
|
"""
|
2019-07-20 08:36:59 +02:00
|
|
|
|
2014-08-31 01:52:28 +02:00
|
|
|
from __future__ import absolute_import
|
2012-10-02 07:50:24 +02:00
|
|
|
|
2017-10-02 18:54:37 +02:00
|
|
|
import collections
|
2013-11-22 03:59:31 +01:00
|
|
|
import compileall
|
2020-04-26 11:01:34 +02:00
|
|
|
import contextlib
|
2012-10-02 07:50:24 +02:00
|
|
|
import csv
|
2020-05-28 10:08:17 +02:00
|
|
|
import io
|
2014-08-31 01:52:28 +02:00
|
|
|
import logging
|
2015-06-01 23:24:11 +02:00
|
|
|
import os.path
|
2013-04-02 07:44:46 +02:00
|
|
|
import re
|
2013-04-05 23:21:11 +02:00
|
|
|
import shutil
|
2014-12-22 19:47:37 +01:00
|
|
|
import stat
|
2013-04-05 23:21:11 +02:00
|
|
|
import sys
|
2015-02-28 11:10:42 +01:00
|
|
|
import warnings
|
2013-04-05 23:21:11 +02:00
|
|
|
from base64 import urlsafe_b64encode
|
2020-02-29 20:53:59 +01:00
|
|
|
from itertools import starmap
|
2020-01-01 18:29:45 +01:00
|
|
|
from zipfile import ZipFile
|
2017-06-13 14:17:00 +02:00
|
|
|
|
2018-01-23 17:02:00 +01:00
|
|
|
from pip._vendor import pkg_resources
|
2017-06-13 14:17:00 +02:00
|
|
|
from pip._vendor.distlib.scripts import ScriptMaker
|
2019-07-22 04:49:51 +02:00
|
|
|
from pip._vendor.distlib.util import get_export_entry
|
2020-05-28 10:08:17 +02:00
|
|
|
from pip._vendor.six import PY2, ensure_str, ensure_text, itervalues, text_type
|
2013-04-05 23:21:11 +02:00
|
|
|
|
2020-01-02 00:01:50 +01:00
|
|
|
from pip._internal.exceptions import InstallationError
|
2019-10-12 03:49:39 +02:00
|
|
|
from pip._internal.locations import get_major_minor_version
|
2020-02-01 13:40:20 +01:00
|
|
|
from pip._internal.models.direct_url import DIRECT_URL_METADATA_NAME, DirectUrl
|
2020-03-29 12:19:34 +02:00
|
|
|
from pip._internal.utils.filesystem import adjacent_tmp_file, replace
|
2019-11-03 14:24:11 +01:00
|
|
|
from pip._internal.utils.misc import captured_stdout, ensure_dir, hash_file
|
2019-12-14 17:10:36 +01:00
|
|
|
from pip._internal.utils.temp_dir import TempDirectory
|
2020-05-18 17:05:38 +02:00
|
|
|
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
|
2020-04-27 10:48:22 +02:00
|
|
|
from pip._internal.utils.unpacking import current_umask, unpack_file
|
2020-01-02 00:01:50 +01:00
|
|
|
from pip._internal.utils.wheel import parse_wheel
|
2016-11-29 19:33:30 +01:00
|
|
|
|
2020-05-18 17:05:38 +02:00
|
|
|
# Use the custom cast function at runtime to make cast work,
|
|
|
|
# and import typing.cast when performing pre-commit and type
|
|
|
|
# checks
|
|
|
|
if not MYPY_CHECK_RUNNING:
|
|
|
|
from pip._internal.utils.typing import cast
|
|
|
|
else:
|
2019-12-31 18:28:26 +01:00
|
|
|
from email.message import Message
|
2019-02-22 12:17:07 +01:00
|
|
|
from typing import (
|
2020-05-12 12:34:30 +02:00
|
|
|
Any,
|
|
|
|
Callable,
|
|
|
|
Dict,
|
|
|
|
IO,
|
|
|
|
Iterable,
|
|
|
|
Iterator,
|
|
|
|
List,
|
|
|
|
NewType,
|
|
|
|
Optional,
|
|
|
|
Sequence,
|
|
|
|
Set,
|
|
|
|
Tuple,
|
|
|
|
Union,
|
|
|
|
cast,
|
2018-12-16 10:16:39 +01:00
|
|
|
)
|
2019-11-07 03:10:11 +01:00
|
|
|
|
|
|
|
from pip._internal.models.scheme import Scheme
|
2020-04-26 11:01:34 +02:00
|
|
|
from pip._internal.utils.filesystem import NamedTemporaryFileResult
|
2018-12-16 10:16:39 +01:00
|
|
|
|
2020-05-12 12:34:30 +02:00
|
|
|
RecordPath = NewType('RecordPath', text_type)
|
|
|
|
InstalledCSVRow = Tuple[RecordPath, str, Union[int, str]]
|
2018-12-16 10:16:39 +01:00
|
|
|
|
2013-04-02 07:44:46 +02:00
|
|
|
|
2014-08-31 01:52:28 +02:00
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
2019-06-26 11:44:43 +02:00
|
|
|
def rehash(path, blocksize=1 << 20):
    # type: (text_type, int) -> Tuple[str, str]
    """Hash the file at ``path`` and return ``(encoded_digest, length)``.

    The digest is the urlsafe-base64 SHA-256 of the file contents with the
    ``=`` padding stripped and a ``sha256=`` prefix, i.e. the form RECORD
    files expect (PEP 376 / PEP 427).
    """
    hasher, size = hash_file(path, blocksize)
    encoded = urlsafe_b64encode(hasher.digest()).decode('latin1')
    digest = 'sha256=' + encoded.rstrip('=')
    # Coerce the length to str so both tuple members are text on Python 2.
    return (digest, str(size))  # type: ignore
|
2012-10-02 07:50:24 +02:00
|
|
|
|
2014-01-27 15:07:10 +01:00
|
|
|
|
2020-03-29 12:19:34 +02:00
|
|
|
def csv_io_kwargs(mode):
    # type: (str) -> Dict[str, Any]
    """Return keyword arguments to properly open a CSV file
    in the given mode.

    The csv module wants binary streams on Python 2 but text streams
    opened with ``newline=''`` on Python 3 (per the csv docs); the
    encoding is pinned to UTF-8 there as well.
    """
    if not PY2:
        return {'mode': mode, 'newline': '', 'encoding': 'utf-8'}
    return {'mode': '{}b'.format(mode)}
|
2012-10-02 07:50:24 +02:00
|
|
|
|
2014-01-27 15:07:10 +01:00
|
|
|
|
2012-10-02 07:50:24 +02:00
|
|
|
def fix_script(path):
    # type: (text_type) -> Optional[bool]
    """Replace a ``#!python`` shebang with the running interpreter's path.

    Returns True when the file was rewritten, False when it has no
    ``#!python`` shebang, and None when ``path`` is not a regular file.
    """
    # XXX RECORD hashes will need to be updated
    if not os.path.isfile(path):
        return None

    with open(path, 'rb') as script:
        shebang = script.readline()
        if not shebang.startswith(b'#!python'):
            return False
        interpreter = sys.executable.encode(sys.getfilesystemencoding())
        new_shebang = b'#!' + interpreter + os.linesep.encode("ascii")
        body = script.read()
    with open(path, 'wb') as script:
        script.write(new_shebang)
        script.write(body)
    return True
|
2013-07-06 07:20:09 +02:00
|
|
|
|
2017-05-19 12:10:57 +02:00
|
|
|
|
2019-12-31 18:45:13 +01:00
|
|
|
def wheel_root_is_purelib(metadata):
    # type: (Message) -> bool
    """Tell whether the wheel's WHEEL metadata marks the root as purelib."""
    value = metadata.get("Root-Is-Purelib", "")
    return value.lower() == "true"
|
|
|
|
|
|
|
|
|
2013-10-23 16:57:12 +02:00
|
|
|
def get_entrypoints(filename):
    # type: (str) -> Tuple[Dict[str, str], Dict[str, str]]
    """Read an entry_points.txt file and return (console, gui) script maps.

    Each map goes from script name to a "module:function" target string.
    A missing file yields two empty dicts.
    """
    if not os.path.exists(filename):
        return {}, {}

    # Entry point data is not guaranteed to be a valid INI file: strip
    # leading and trailing whitespace from every line so the parser
    # accepts it.
    with io.open(filename, encoding="utf-8") as fp:
        normalized = io.StringIO()
        for raw_line in fp:
            normalized.write(raw_line.strip())
            normalized.write(u"\n")
        normalized.seek(0)

    # Parse the entry points, then pick out the two script groups.
    entry_points = pkg_resources.EntryPoint.parse_map(normalized)
    console = entry_points.get('console_scripts', {})
    gui = entry_points.get('gui_scripts', {})

    def _as_pair(ep):
        # type: (pkg_resources.EntryPoint) -> Tuple[str, str]
        """Render an EntryPoint as (name, target) via its string form."""
        parts = str(ep).replace(" ", "").split("=")
        return parts[0], parts[1]

    # Flatten the EntryPoint objects down to plain "module:function" strings.
    console = dict(_as_pair(ep) for ep in console.values())
    gui = dict(_as_pair(ep) for ep in gui.values())
    return console, gui
|
|
|
|
|
2013-11-08 12:32:35 +01:00
|
|
|
|
2017-10-02 18:54:37 +02:00
|
|
|
def message_about_scripts_not_on_PATH(scripts):
    # type: (Sequence[str]) -> Optional[str]
    """Determine if any scripts are not on PATH and format a warning.

    Returns a warning message if one or more scripts are not on PATH,
    otherwise None.
    """
    if not scripts:
        return None

    # Group the installed script names by their parent directory.
    grouped_by_dir = collections.defaultdict(set)  # type: Dict[str, Set[str]]
    for destfile in scripts:
        grouped_by_dir[os.path.dirname(destfile)].add(
            os.path.basename(destfile))

    # Directories already on PATH never need a warning.
    not_warn_dirs = [
        os.path.normcase(entry).rstrip(os.sep)
        for entry in os.environ.get("PATH", "").split(os.pathsep)
    ]
    # Neither does the directory holding sys.executable: this covers venv
    # invocations that did not activate the venv first.
    not_warn_dirs.append(os.path.normcase(os.path.dirname(sys.executable)))
    warn_for = {
        parent_dir: dir_scripts
        for parent_dir, dir_scripts in grouped_by_dir.items()
        if os.path.normcase(parent_dir) not in not_warn_dirs
    }  # type: Dict[str, Set[str]]
    if not warn_for:
        return None

    # Build one message line per offending directory.
    msg_lines = []
    for parent_dir, dir_scripts in warn_for.items():
        sorted_scripts = sorted(dir_scripts)  # type: List[str]
        if len(sorted_scripts) == 1:
            start_text = "script {} is".format(sorted_scripts[0])
        else:
            start_text = "scripts {} are".format(
                ", ".join(sorted_scripts[:-1]) + " and " + sorted_scripts[-1]
            )
        msg_lines.append(
            "The {} installed in '{}' which is not on PATH."
            .format(start_text, parent_dir)
        )

    last_line_fmt = (
        "Consider adding {} to PATH or, if you prefer "
        "to suppress this warning, use --no-warn-script-location."
    )
    if len(msg_lines) == 1:
        msg_lines.append(last_line_fmt.format("this directory"))
    else:
        msg_lines.append(last_line_fmt.format("these directories"))

    # PATH entries starting with "~" may not be expanded by every program.
    warn_for_tilde = any(
        entry[0] == "~"
        for entry in os.environ.get("PATH", "").split(os.pathsep)
        if entry
    )
    if warn_for_tilde:
        msg_lines.append(
            "NOTE: The current PATH contains path(s) starting with `~`, "
            "which may not be expanded by all applications."
        )

    # Returns the formatted multiline message
    return "\n".join(msg_lines)
|
|
|
|
|
|
|
|
|
2020-05-12 12:34:30 +02:00
|
|
|
def _normalized_outrows(outrows):
    # type: (Iterable[InstalledCSVRow]) -> List[Tuple[str, str, str]]
    """Normalize the given rows of a RECORD file.

    Items in each row are converted into str. Rows are then sorted to make
    the value more predictable for tests.

    Each row is a 3-tuple (path, hash, size) and corresponds to a record of
    a RECORD file (see PEP 376 and PEP 427 for details). For the rows
    passed to this function, the size can be an integer as an int or string,
    or the empty string.
    """
    # Coercing every element to str before sorting keeps the comparison
    # well defined even when a path happens to occur twice (the third
    # element could otherwise mix int and str and raise a TypeError).
    # See https://github.com/pypa/pip/issues/5868 for background.
    stringified = [
        (ensure_str(record_path, encoding='utf-8'), file_hash, str(size))
        for record_path, file_hash, size in outrows
    ]
    return sorted(stringified)
|
|
|
|
|
|
|
|
|
|
|
|
def _record_to_fs_path(record_path):
    # type: (RecordPath) -> text_type
    # A RECORD path is already usable as a filesystem path here; this
    # function exists to make the RecordPath -> text_type conversion
    # explicit at the type level (the inverse of _fs_to_record_path).
    return record_path
|
2020-05-12 12:34:30 +02:00
|
|
|
|
|
|
|
|
|
|
|
def _fs_to_record_path(path, relative_to=None):
|
2020-05-12 15:33:05 +02:00
|
|
|
# type: (text_type, Optional[text_type]) -> RecordPath
|
2020-05-12 12:34:30 +02:00
|
|
|
if relative_to is not None:
|
2020-04-16 14:59:03 +02:00
|
|
|
# On Windows, do not handle relative paths if they belong to different
|
|
|
|
# logical disks
|
|
|
|
if os.path.splitdrive(path)[0].lower() == \
|
|
|
|
os.path.splitdrive(relative_to)[0].lower():
|
|
|
|
path = os.path.relpath(path, relative_to)
|
2020-05-12 12:34:30 +02:00
|
|
|
path = path.replace(os.path.sep, '/')
|
2020-05-12 15:33:05 +02:00
|
|
|
return cast('RecordPath', path)
|
2020-05-12 12:34:30 +02:00
|
|
|
|
|
|
|
|
|
|
|
def _parse_record_path(record_column):
    # type: (str) -> RecordPath
    """Decode the first RECORD column into a RecordPath (text) value."""
    decoded = ensure_text(record_column, encoding='utf-8')
    return cast('RecordPath', decoded)
|
2018-10-24 18:19:58 +02:00
|
|
|
|
|
|
|
|
2019-01-24 03:44:54 +01:00
|
|
|
def get_csv_rows_for_installed(
    old_csv_rows,  # type: Iterable[List[str]]
    installed,  # type: Dict[RecordPath, RecordPath]
    changed,  # type: Set[RecordPath]
    generated,  # type: List[str]
    lib_dir,  # type: str
):
    # type: (...) -> List[InstalledCSVRow]
    """Build the rows for the installed distribution's RECORD file.

    :param old_csv_rows: Rows parsed from the wheel's original RECORD.
    :param installed: A map from archive RECORD path to installation RECORD
        path. NOTE: consumed in place — entries matched against
        ``old_csv_rows`` are popped, and whatever remains afterwards is
        appended with empty hash/size columns.
    :param changed: Installation RECORD paths whose content was modified
        during install (so their hash/size must be recomputed).
    :param generated: Filesystem paths of files created during install
        (script wrappers etc.); always rehashed.
    :param lib_dir: Base directory used to relativize ``generated`` paths.
    """
    installed_rows = []  # type: List[InstalledCSVRow]
    for row in old_csv_rows:
        if len(row) > 3:
            # Malformed RECORD line; warn but keep going with the first
            # three columns.
            logger.warning(
                'RECORD line has more than three elements: {}'.format(row)
            )
        old_record_path = _parse_record_path(row[0])
        # Map the archive path to where the file actually landed; files
        # not in `installed` keep their original path.
        new_record_path = installed.pop(old_record_path, old_record_path)
        if new_record_path in changed:
            # Content was rewritten (e.g. script shebang) — recompute.
            digest, length = rehash(_record_to_fs_path(new_record_path))
        else:
            digest = row[1] if len(row) > 1 else ''
            length = row[2] if len(row) > 2 else ''
        installed_rows.append((new_record_path, digest, length))
    for f in generated:
        path = _fs_to_record_path(f, lib_dir)
        digest, length = rehash(f)
        installed_rows.append((path, digest, length))
    # Anything still left in `installed` was copied but had no row in the
    # old RECORD; record it without hash/size.
    for installed_record_path in itervalues(installed):
        installed_rows.append((installed_record_path, '', ''))
    return installed_rows
|
|
|
|
|
|
|
|
|
2019-07-22 04:49:51 +02:00
|
|
|
class MissingCallableSuffix(Exception):
    """Raised when a script entry point spec lacks its ':callable' suffix."""
    pass
|
|
|
|
|
|
|
|
|
2019-09-07 15:59:41 +02:00
|
|
|
def _raise_for_invalid_entrypoint(specification):
    # type: (str) -> None
    """Raise MissingCallableSuffix for entry specs without a callable part."""
    entry = get_export_entry(specification)
    if entry is None:
        return
    if entry.suffix is None:
        raise MissingCallableSuffix(str(entry))
|
|
|
|
|
|
|
|
|
|
|
|
class PipScriptMaker(ScriptMaker):
    # ScriptMaker subclass that validates entry-point specifications
    # before delegating wrapper generation to distlib.
    def make(self, specification, options=None):
        # type: (str, Dict[str, Any]) -> List[str]
        # Fail early with MissingCallableSuffix if the spec names a module
        # but no callable, instead of generating a broken wrapper script.
        _raise_for_invalid_entrypoint(specification)
        return super(PipScriptMaker, self).make(specification, options)
|
|
|
|
|
|
|
|
|
2019-10-12 03:31:35 +02:00
|
|
|
def install_unpacked_wheel(
    name,  # type: str
    wheeldir,  # type: str
    wheel_zip,  # type: ZipFile
    scheme,  # type: Scheme
    req_description,  # type: str
    pycompile=True,  # type: bool
    warn_script_location=True,  # type: bool
    direct_url=None,  # type: Optional[DirectUrl]
    requested=False,  # type: bool
):
    # type: (...) -> None
    """Install a wheel.

    :param name: Name of the project to install
    :param wheeldir: Base directory of the unpacked wheel
    :param wheel_zip: open ZipFile for wheel being installed
    :param scheme: Distutils scheme dictating the install directories
    :param req_description: String used in place of the requirement, for
        logging
    :param pycompile: Whether to byte-compile installed Python files
    :param warn_script_location: Whether to check that scripts are installed
        into a directory on PATH
    :param direct_url: When given, a PEP 610 direct_url.json file is written
        into the .dist-info directory
    :param requested: When True, a PEP 376 REQUESTED marker file is written
        into the .dist-info directory
    :raises UnsupportedWheel:
        * when the directory holds an unpacked wheel with incompatible
          Wheel-Version
        * when the .dist-info dir does not match the wheel
    """
    # TODO: Investigate and break this up.
    # TODO: Look into moving this into a dedicated class for representing an
    #       installation.

    source = wheeldir.rstrip(os.path.sep) + os.path.sep

    info_dir, metadata = parse_wheel(wheel_zip, name)

    if wheel_root_is_purelib(metadata):
        lib_dir = scheme.purelib
    else:
        lib_dir = scheme.platlib

    subdirs = os.listdir(source)
    data_dirs = [s for s in subdirs if s.endswith('.data')]

    # Record details of the files moved
    #   installed = files copied from the wheel to the destination
    #   changed = files changed while installing (scripts #! line typically)
    #   generated = files newly generated during the install (script wrappers)
    installed = {}  # type: Dict[RecordPath, RecordPath]
    changed = set()  # type: Set[RecordPath]
    generated = []  # type: List[str]

    # Compile all of the pyc files that we're going to be installing
    if pycompile:
        with captured_stdout() as stdout:
            with warnings.catch_warnings():
                warnings.filterwarnings('ignore')
                compileall.compile_dir(source, force=True, quiet=True)
        logger.debug(stdout.getvalue())

    def record_installed(srcfile, destfile, modified=False):
        # type: (text_type, text_type, bool) -> None
        """Map archive RECORD paths to installation RECORD paths."""
        oldpath = _fs_to_record_path(srcfile, wheeldir)
        newpath = _fs_to_record_path(destfile, lib_dir)
        installed[oldpath] = newpath
        if modified:
            changed.add(_fs_to_record_path(destfile))

    def clobber(
            source,  # type: text_type
            dest,  # type: text_type
            is_base,  # type: bool
            fixer=None,  # type: Optional[Callable[[text_type], Any]]
            filter=None  # type: Optional[Callable[[text_type], bool]]
    ):
        # type: (...) -> None
        # Recursively copy `source` into `dest`, recording every copied
        # file via record_installed(). `fixer` may rewrite each destination
        # file (returning truthy when it changed it); `filter` skips files.
        # NOTE: `filter` deliberately shadows the builtin within this scope.
        ensure_dir(dest)  # common for the 'include' path

        for dir, subdirs, files in os.walk(source):
            basedir = dir[len(source):].lstrip(os.path.sep)
            destdir = os.path.join(dest, basedir)
            if is_base and basedir == '':
                # Skip .data dirs at the wheel root; they are handled
                # separately below with their own destination schemes.
                subdirs[:] = [s for s in subdirs if not s.endswith('.data')]
            for f in files:
                # Skip unwanted files
                if filter and filter(f):
                    continue
                srcfile = os.path.join(dir, f)
                destfile = os.path.join(dest, basedir, f)
                # directory creation is lazy and after the file filtering above
                # to ensure we don't install empty dirs; empty dirs can't be
                # uninstalled.
                ensure_dir(destdir)

                # copyfile (called below) truncates the destination if it
                # exists and then writes the new contents. This is fine in most
                # cases, but can cause a segfault if pip has loaded a shared
                # object (e.g. from pyopenssl through its vendored urllib3)
                # Since the shared object is mmap'd an attempt to call a
                # symbol in it will then cause a segfault. Unlinking the file
                # allows writing of new contents while allowing the process to
                # continue to use the old copy.
                if os.path.exists(destfile):
                    os.unlink(destfile)

                # We use copyfile (not move, copy, or copy2) to be extra sure
                # that we are not moving directories over (copyfile fails for
                # directories) as well as to ensure that we are not copying
                # over any metadata because we want more control over what
                # metadata we actually copy over.
                shutil.copyfile(srcfile, destfile)

                # Copy over the metadata for the file, currently this only
                # includes the atime and mtime.
                st = os.stat(srcfile)
                if hasattr(os, "utime"):
                    os.utime(destfile, (st.st_atime, st.st_mtime))

                # If our file is executable, then make our destination file
                # executable.
                if os.access(srcfile, os.X_OK):
                    st = os.stat(srcfile)
                    permissions = (
                        st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
                    )
                    os.chmod(destfile, permissions)

                changed = False
                if fixer:
                    changed = fixer(destfile)
                record_installed(srcfile, destfile, changed)

    clobber(
        ensure_text(source, encoding=sys.getfilesystemencoding()),
        ensure_text(lib_dir, encoding=sys.getfilesystemencoding()),
        True,
    )

    dest_info_dir = os.path.join(lib_dir, info_dir)

    # Get the defined entry points
    ep_file = os.path.join(dest_info_dir, 'entry_points.txt')
    console, gui = get_entrypoints(ep_file)

    def is_entrypoint_wrapper(name):
        # type: (text_type) -> bool
        # EP, EP.exe and EP-script.py are scripts generated for
        # entry point EP by setuptools
        if name.lower().endswith('.exe'):
            matchname = name[:-4]
        elif name.lower().endswith('-script.py'):
            matchname = name[:-10]
        elif name.lower().endswith(".pya"):
            matchname = name[:-4]
        else:
            matchname = name
        # Ignore setuptools-generated scripts
        return (matchname in console or matchname in gui)

    # Install each .data subdirectory to the destination named by the
    # scheme attribute of the same name; script files get their shebang
    # fixed and setuptools-generated wrappers are filtered out.
    for datadir in data_dirs:
        fixer = None
        filter = None
        for subdir in os.listdir(os.path.join(wheeldir, datadir)):
            fixer = None
            if subdir == 'scripts':
                fixer = fix_script
                filter = is_entrypoint_wrapper
            source = os.path.join(wheeldir, datadir, subdir)
            dest = getattr(scheme, subdir)
            clobber(
                ensure_text(source, encoding=sys.getfilesystemencoding()),
                ensure_text(dest, encoding=sys.getfilesystemencoding()),
                False,
                fixer=fixer,
                filter=filter,
            )

    maker = PipScriptMaker(None, scheme.scripts)

    # Ensure old scripts are overwritten.
    # See https://github.com/pypa/pip/issues/1800
    maker.clobber = True

    # Ensure we don't generate any variants for scripts because this is almost
    # never what somebody wants.
    # See https://bitbucket.org/pypa/distlib/issue/35/
    maker.variants = {''}

    # This is required because otherwise distlib creates scripts that are not
    # executable.
    # See https://bitbucket.org/pypa/distlib/issue/32/
    maker.set_mode = True

    scripts_to_generate = []

    # Special case pip and setuptools to generate versioned wrappers
    #
    # The issue is that some projects (specifically, pip and setuptools) use
    # code in setup.py to create "versioned" entry points - pip2.7 on Python
    # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
    # the wheel metadata at build time, and so if the wheel is installed with
    # a *different* version of Python the entry points will be wrong. The
    # correct fix for this is to enhance the metadata to be able to describe
    # such versioned entry points, but that won't happen till Metadata 2.0 is
    # available.
    # In the meantime, projects using versioned entry points will either have
    # incorrect versioned entry points, or they will not be able to distribute
    # "universal" wheels (i.e., they will need a wheel per Python version).
    #
    # Because setuptools and pip are bundled with _ensurepip and virtualenv,
    # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
    # override the versioned entry points in the wheel and generate the
    # correct ones. This code is purely a short-term measure until Metadata 2.0
    # is available.
    #
    # To add the level of hack in this section of code, in order to support
    # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment
    # variable which will control which version scripts get installed.
    #
    # ENSUREPIP_OPTIONS=altinstall
    #   - Only pipX.Y and easy_install-X.Y will be generated and installed
    # ENSUREPIP_OPTIONS=install
    #   - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note
    #     that this option is technically if ENSUREPIP_OPTIONS is set and is
    #     not altinstall
    # DEFAULT
    #   - The default behavior is to install pip, pipX, pipX.Y, easy_install
    #     and easy_install-X.Y.
    pip_script = console.pop('pip', None)
    if pip_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            scripts_to_generate.append('pip = ' + pip_script)

        if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
            scripts_to_generate.append(
                'pip{} = {}'.format(sys.version_info[0], pip_script)
            )

        scripts_to_generate.append(
            'pip{} = {}'.format(get_major_minor_version(), pip_script)
        )
        # Delete any other versioned pip entry points
        pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
        for k in pip_ep:
            del console[k]
    easy_install_script = console.pop('easy_install', None)
    if easy_install_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            scripts_to_generate.append(
                'easy_install = ' + easy_install_script
            )

        scripts_to_generate.append(
            'easy_install-{} = {}'.format(
                get_major_minor_version(), easy_install_script
            )
        )
        # Delete any other versioned easy_install entry points
        easy_install_ep = [
            k for k in console if re.match(r'easy_install(-\d\.\d)?$', k)
        ]
        for k in easy_install_ep:
            del console[k]

    # Generate the console and GUI entry points specified in the wheel
    scripts_to_generate.extend(starmap('{} = {}'.format, console.items()))

    gui_scripts_to_generate = list(starmap('{} = {}'.format, gui.items()))

    generated_console_scripts = []  # type: List[str]

    try:
        generated_console_scripts = maker.make_multiple(scripts_to_generate)
        generated.extend(generated_console_scripts)

        generated.extend(
            maker.make_multiple(gui_scripts_to_generate, {'gui': True})
        )
    except MissingCallableSuffix as e:
        entry = e.args[0]
        raise InstallationError(
            "Invalid script entry point: {} for req: {} - A callable "
            "suffix is required. Cf https://packaging.python.org/"
            "specifications/entry-points/#use-for-scripts for more "
            "information.".format(entry, req_description)
        )

    if warn_script_location:
        msg = message_about_scripts_not_on_PATH(generated_console_scripts)
        if msg is not None:
            logger.warning(msg)

    generated_file_mode = 0o666 & ~current_umask()

    @contextlib.contextmanager
    def _generate_file(path, **kwargs):
        # type: (str, **Any) -> Iterator[NamedTemporaryFileResult]
        # Write to an adjacent temp file, fix its mode, then atomically
        # replace the target — avoids half-written metadata files.
        with adjacent_tmp_file(path, **kwargs) as f:
            yield f
            os.chmod(f.name, generated_file_mode)
        replace(f.name, path)

    # Record pip as the installer
    installer_path = os.path.join(dest_info_dir, 'INSTALLER')
    with _generate_file(installer_path) as installer_file:
        installer_file.write(b'pip\n')
    generated.append(installer_path)

    # Record the PEP 610 direct URL reference
    if direct_url is not None:
        direct_url_path = os.path.join(dest_info_dir, DIRECT_URL_METADATA_NAME)
        with _generate_file(direct_url_path) as direct_url_file:
            direct_url_file.write(direct_url.to_json().encode("utf-8"))
        generated.append(direct_url_path)

    # Record the REQUESTED file
    if requested:
        requested_path = os.path.join(dest_info_dir, 'REQUESTED')
        with open(requested_path, "w"):
            pass
        generated.append(requested_path)

    # Record details of all files installed
    record_path = os.path.join(dest_info_dir, 'RECORD')
    with open(record_path, **csv_io_kwargs('r')) as record_file:
        rows = get_csv_rows_for_installed(
            csv.reader(record_file),
            installed=installed,
            changed=changed,
            generated=generated,
            lib_dir=lib_dir)
    with _generate_file(record_path, **csv_io_kwargs('w')) as record_file:
        # The type mypy infers for record_file is different for Python 3
        # (typing.IO[Any]) and Python 2 (typing.BinaryIO). We explicitly
        # cast to typing.IO[str] as a workaround.
        writer = csv.writer(cast('IO[str]', record_file))
        writer.writerows(_normalized_outrows(rows))
|
2012-10-02 07:50:24 +02:00
|
|
|
|
2014-01-27 15:07:10 +01:00
|
|
|
|
2019-12-14 17:10:36 +01:00
|
|
|
def install_wheel(
    name,  # type: str
    wheel_path,  # type: str
    scheme,  # type: Scheme
    req_description,  # type: str
    pycompile=True,  # type: bool
    warn_script_location=True,  # type: bool
    _temp_dir_for_testing=None,  # type: Optional[str]
    direct_url=None,  # type: Optional[DirectUrl]
    requested=False,  # type: bool
):
    # type: (...) -> None
    """Unpack the wheel at ``wheel_path`` and install it per ``scheme``.

    ``_temp_dir_for_testing`` lets tests pin the unpack location; normally
    a fresh temporary directory is used and cleaned up on exit.
    """
    staging = TempDirectory(path=_temp_dir_for_testing, kind="unpacked-wheel")
    with staging as unpacked_dir:
        with ZipFile(wheel_path, allowZip64=True) as z:
            unpack_file(wheel_path, unpacked_dir.path)
            install_unpacked_wheel(
                name=name,
                wheeldir=unpacked_dir.path,
                wheel_zip=z,
                scheme=scheme,
                req_description=req_description,
                pycompile=pycompile,
                warn_script_location=warn_script_location,
                direct_url=direct_url,
                requested=requested,
            )
|