"""
|
2013-04-02 07:44:46 +02:00
|
|
|
Support for installing and building the "wheel" binary package format.
|
2012-10-02 07:50:24 +02:00
|
|
|
"""
|
2019-07-20 08:36:59 +02:00
|
|
|
|
|
|
|
# The following comment should be removed at some point in the future.
|
|
|
|
# mypy: strict-optional=False
|
2019-09-28 20:12:49 +02:00
|
|
|
# mypy: disallow-untyped-defs=False
|
2019-07-20 08:36:59 +02:00
|
|
|
|
2014-08-31 01:52:28 +02:00
|
|
|
from __future__ import absolute_import
|
2012-10-02 07:50:24 +02:00
|
|
|
|
2017-10-02 18:54:37 +02:00
|
|
|
import collections
|
2013-11-22 03:59:31 +01:00
|
|
|
import compileall
|
2012-10-02 07:50:24 +02:00
|
|
|
import csv
|
|
|
|
import hashlib
|
2014-08-31 01:52:28 +02:00
|
|
|
import logging
|
2015-06-01 23:24:11 +02:00
|
|
|
import os.path
|
2013-04-02 07:44:46 +02:00
|
|
|
import re
|
2013-04-05 23:21:11 +02:00
|
|
|
import shutil
|
2014-12-22 19:47:37 +01:00
|
|
|
import stat
|
2013-04-05 23:21:11 +02:00
|
|
|
import sys
|
2015-02-28 11:10:42 +01:00
|
|
|
import warnings
|
2013-04-05 23:21:11 +02:00
|
|
|
from base64 import urlsafe_b64encode
|
2014-02-17 00:01:32 +01:00
|
|
|
from email.parser import Parser
|
2017-06-13 14:17:00 +02:00
|
|
|
|
2018-01-23 17:02:00 +01:00
|
|
|
from pip._vendor import pkg_resources
|
2017-06-13 14:17:00 +02:00
|
|
|
from pip._vendor.distlib.scripts import ScriptMaker
|
2019-07-22 04:49:51 +02:00
|
|
|
from pip._vendor.distlib.util import get_export_entry
|
2017-06-13 14:17:00 +02:00
|
|
|
from pip._vendor.packaging.utils import canonicalize_name
|
|
|
|
from pip._vendor.six import StringIO
|
2013-04-05 23:21:11 +02:00
|
|
|
|
2017-08-31 17:48:18 +02:00
|
|
|
from pip._internal import pep425tags
|
|
|
|
from pip._internal.exceptions import (
|
2019-07-22 06:45:27 +02:00
|
|
|
InstallationError,
|
|
|
|
InvalidWheelFilename,
|
|
|
|
UnsupportedWheel,
|
2017-05-16 12:16:30 +02:00
|
|
|
)
|
2019-10-12 03:49:39 +02:00
|
|
|
from pip._internal.locations import get_major_minor_version
|
2018-09-19 13:00:23 +02:00
|
|
|
from pip._internal.models.link import Link
|
2017-08-31 17:48:18 +02:00
|
|
|
from pip._internal.utils.logging import indent_log
|
2019-09-02 21:47:32 +02:00
|
|
|
from pip._internal.utils.marker_files import has_delete_marker_file
|
2019-09-30 07:45:24 +02:00
|
|
|
from pip._internal.utils.misc import captured_stdout, ensure_dir, read_chunks
|
2019-10-13 03:20:10 +02:00
|
|
|
from pip._internal.utils.setuptools_build import (
|
|
|
|
make_setuptools_bdist_wheel_args,
|
|
|
|
make_setuptools_clean_args,
|
|
|
|
)
|
2019-09-30 07:45:24 +02:00
|
|
|
from pip._internal.utils.subprocess import (
|
2019-07-22 06:45:27 +02:00
|
|
|
LOG_DIVIDER,
|
|
|
|
call_subprocess,
|
|
|
|
format_command_args,
|
2019-09-30 08:36:05 +02:00
|
|
|
runner_with_spinner_message,
|
2017-08-31 17:48:18 +02:00
|
|
|
)
|
|
|
|
from pip._internal.utils.temp_dir import TempDirectory
|
2017-10-02 18:54:37 +02:00
|
|
|
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
|
2017-08-31 17:48:18 +02:00
|
|
|
from pip._internal.utils.ui import open_spinner
|
2019-09-20 02:19:24 +02:00
|
|
|
from pip._internal.utils.unpacking import unpack_file
|
2019-09-24 10:56:42 +02:00
|
|
|
from pip._internal.utils.urls import path_to_url
|
2016-11-29 19:33:30 +01:00
|
|
|
|
2017-10-02 18:54:37 +02:00
|
|
|
if MYPY_CHECK_RUNNING:
|
2019-02-22 12:17:07 +01:00
|
|
|
from typing import (
|
2019-09-05 02:55:11 +02:00
|
|
|
Dict, List, Optional, Sequence, Mapping, Tuple, IO, Text, Any,
|
2019-09-30 18:03:34 +02:00
|
|
|
Iterable, Callable, Set, Union,
|
2018-12-16 10:16:39 +01:00
|
|
|
)
|
2019-02-22 12:17:07 +01:00
|
|
|
from pip._internal.req.req_install import InstallRequirement
|
|
|
|
from pip._internal.operations.prepare import (
|
2018-12-16 10:16:39 +01:00
|
|
|
RequirementPreparer
|
|
|
|
)
|
2019-02-22 12:17:07 +01:00
|
|
|
from pip._internal.cache import WheelCache
|
|
|
|
from pip._internal.pep425tags import Pep425Tag
|
2018-12-16 10:16:39 +01:00
|
|
|
|
2019-01-24 03:44:54 +01:00
|
|
|
InstalledCSVRow = Tuple[str, ...]
|
2018-12-16 10:16:39 +01:00
|
|
|
|
2019-09-05 02:55:11 +02:00
|
|
|
BinaryAllowedPredicate = Callable[[InstallRequirement], bool]
|
|
|
|
|
2016-11-29 19:33:30 +01:00
|
|
|
|
2014-02-15 07:21:22 +01:00
|
|
|
VERSION_COMPATIBLE = (1, 0)
|
|
|
|
|
2013-04-02 07:44:46 +02:00
|
|
|
|
2014-08-31 01:52:28 +02:00
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
2019-01-24 03:44:54 +01:00
|
|
|
def normpath(src, p):
|
|
|
|
return os.path.relpath(src, p).replace(os.path.sep, '/')
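

# A worked example of the helper above (hypothetical paths, shown as a
# comment so module import stays side-effect free):
#
#     normpath('/srv/site-packages/pkg/mod.py', '/srv/site-packages')
#     # -> 'pkg/mod.py'   (relative, '/'-separated, as RECORD expects)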


def hash_file(path, blocksize=1 << 20):
    # type: (str, int) -> Tuple[Any, int]
    """Return (hash, length) for path using hashlib.sha256()"""
    h = hashlib.sha256()
    length = 0
    with open(path, 'rb') as f:
        for block in read_chunks(f, size=blocksize):
            length += len(block)
            h.update(block)
    return (h, length)  # type: ignore


def rehash(path, blocksize=1 << 20):
    # type: (str, int) -> Tuple[str, str]
    """Return (encoded_digest, length) for path using hashlib.sha256()"""
    h, length = hash_file(path, blocksize)
    digest = 'sha256=' + urlsafe_b64encode(
        h.digest()
    ).decode('latin1').rstrip('=')
    # unicode/str python2 issues
    return (digest, str(length))  # type: ignore
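

# Sketch of the expected output shape (illustrative, truncated digest):
# rehash() urlsafe-base64-encodes the raw sha256 digest and strips the
# trailing '=' padding, matching the RECORD hash format from PEP 376:
#
#     rehash('/path/to/file.py')
#     # -> ('sha256=47DEQpj8HBSa-_TImW-5JC...', '1024')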


def open_for_csv(name, mode):
    # type: (str, Text) -> IO
    if sys.version_info[0] < 3:
        nl = {}  # type: Dict[str, Any]
        bin = 'b'
    else:
        nl = {'newline': ''}  # type: Dict[str, Any]
        bin = ''
    return open(name, mode + bin, **nl)


def replace_python_tag(wheelname, new_tag):
    # type: (str, str) -> str
    """Replace the Python tag in a wheel file name with a new value.
    """
    parts = wheelname.split('-')
    parts[-3] = new_tag
    return '-'.join(parts)
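

# Example of the tag replacement (the Python tag is the third-from-last
# dash-separated field of a wheel filename):
#
#     replace_python_tag('foo-1.0-py2-none-any.whl', 'py37')
#     # -> 'foo-1.0-py37-none-any.whl'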


def fix_script(path):
    # type: (str) -> Optional[bool]
    """Replace #!python with #!/path/to/python
    Return True if file was changed."""
    # XXX RECORD hashes will need to be updated
    if os.path.isfile(path):
        with open(path, 'rb') as script:
            firstline = script.readline()
            if not firstline.startswith(b'#!python'):
                return False
            exename = sys.executable.encode(sys.getfilesystemencoding())
            firstline = b'#!' + exename + os.linesep.encode("ascii")
            rest = script.read()
        with open(path, 'wb') as script:
            script.write(firstline)
            script.write(rest)
        return True
    return None
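

# Illustrative before/after for the rewrite above, assuming
# sys.executable == '/usr/bin/python3' (environment-dependent):
#
#     #!python    ->    #!/usr/bin/python3
#
# Only scripts whose first line starts with the '#!python' stub are
# rewritten; anything else returns False (or None for non-files).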


dist_info_re = re.compile(r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>.+?))?)
                           \.dist-info$""", re.VERBOSE)


def root_is_purelib(name, wheeldir):
    # type: (str, str) -> bool
    """
    Return True if the extracted wheel in wheeldir should go into purelib.
    """
    name_folded = name.replace("-", "_")
    for item in os.listdir(wheeldir):
        match = dist_info_re.match(item)
        if match and match.group('name') == name_folded:
            with open(os.path.join(wheeldir, item, 'WHEEL')) as wheel:
                for line in wheel:
                    line = line.lower().rstrip()
                    if line == "root-is-purelib: true":
                        return True
    return False
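

# The check above keys off the WHEEL metadata file inside the unpacked
# .dist-info directory. A minimal example of the relevant lines (values
# illustrative):
#
#     Wheel-Version: 1.0
#     Root-Is-Purelib: true
#
# root_is_purelib('my-package', wheeldir) then returns True, because the
# second line lower-cases to 'root-is-purelib: true'.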


def get_entrypoints(filename):
    # type: (str) -> Tuple[Dict[str, str], Dict[str, str]]
    if not os.path.exists(filename):
        return {}, {}

    # This is done because you can pass a string to entry_points wrappers which
    # means that they may or may not be valid INI files. The attempt here is to
    # strip leading and trailing whitespace in order to make them valid INI
    # files.
    with open(filename) as fp:
        data = StringIO()
        for line in fp:
            data.write(line.strip())
            data.write("\n")
        data.seek(0)

    # get the entry points and then the script names
    entry_points = pkg_resources.EntryPoint.parse_map(data)
    console = entry_points.get('console_scripts', {})
    gui = entry_points.get('gui_scripts', {})

    def _split_ep(s):
        """Get the string representation of an EntryPoint, remove all
        spaces, and split on '='."""
        return str(s).replace(" ", "").split("=")

    # convert the EntryPoint objects into strings with module:function
    console = dict(_split_ep(v) for v in console.values())
    gui = dict(_split_ep(v) for v in gui.values())
    return console, gui
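

# Sketch of the parse above for a typical entry_points.txt (hypothetical
# project names):
#
#     [console_scripts]
#     mycli = mypkg.cli:main
#
#     [gui_scripts]
#     mygui = mypkg.gui:main
#
# get_entrypoints() would return:
#
#     ({'mycli': 'mypkg.cli:main'}, {'mygui': 'mypkg.gui:main'})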


def message_about_scripts_not_on_PATH(scripts):
    # type: (Sequence[str]) -> Optional[str]
    """Determine if any scripts are not on PATH and format a warning.

    Returns a warning message if one or more scripts are not on PATH,
    otherwise None.
    """
    if not scripts:
        return None

    # Group scripts by the path they were installed in
    grouped_by_dir = collections.defaultdict(set)  # type: Dict[str, Set[str]]
    for destfile in scripts:
        parent_dir = os.path.dirname(destfile)
        script_name = os.path.basename(destfile)
        grouped_by_dir[parent_dir].add(script_name)

    # We don't want to warn for directories that are on PATH.
    not_warn_dirs = [
        os.path.normcase(i).rstrip(os.sep) for i in
        os.environ.get("PATH", "").split(os.pathsep)
    ]
    # If an executable sits with sys.executable, we don't warn for it.
    # This covers the case of venv invocations without activating the venv.
    not_warn_dirs.append(os.path.normcase(os.path.dirname(sys.executable)))
    warn_for = {
        parent_dir: scripts for parent_dir, scripts in grouped_by_dir.items()
        if os.path.normcase(parent_dir) not in not_warn_dirs
    }  # type: Dict[str, Set[str]]
    if not warn_for:
        return None

    # Format a message
    msg_lines = []
    for parent_dir, dir_scripts in warn_for.items():
        sorted_scripts = sorted(dir_scripts)  # type: List[str]
        if len(sorted_scripts) == 1:
            start_text = "script {} is".format(sorted_scripts[0])
        else:
            start_text = "scripts {} are".format(
                ", ".join(sorted_scripts[:-1]) + " and " + sorted_scripts[-1]
            )

        msg_lines.append(
            "The {} installed in '{}' which is not on PATH."
            .format(start_text, parent_dir)
        )

    last_line_fmt = (
        "Consider adding {} to PATH or, if you prefer "
        "to suppress this warning, use --no-warn-script-location."
    )
    if len(msg_lines) == 1:
        msg_lines.append(last_line_fmt.format("this directory"))
    else:
        msg_lines.append(last_line_fmt.format("these directories"))

    # Returns the formatted multiline message
    return "\n".join(msg_lines)
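

# Illustrative output (paths hypothetical): for scripts=['/opt/tools/bin/foo']
# with '/opt/tools/bin' absent from PATH, the returned two-line message is:
#
#     The script foo is installed in '/opt/tools/bin' which is not on PATH.
#     Consider adding this directory to PATH or, if you prefer to suppress
#       this warning, use --no-warn-script-location.
#
# (the second line is a single string; it is wrapped here for readability)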


def sorted_outrows(outrows):
    # type: (Iterable[InstalledCSVRow]) -> List[InstalledCSVRow]
    """
    Return the given rows of a RECORD file in sorted order.

    Each row is a 3-tuple (path, hash, size) and corresponds to a record of
    a RECORD file (see PEP 376 and PEP 427 for details). For the rows
    passed to this function, the size can be an integer (given as an int or
    a string) or the empty string.
    """
    # Normally, there should only be one row per path, in which case the
    # second and third elements don't come into play when sorting.
    # However, in cases in the wild where a path might happen to occur twice,
    # we don't want the sort operation to trigger an error (but still want
    # determinism). Since the third element can be an int or string, we
    # coerce each element to a string to avoid a TypeError in this case.
    # For additional background, see--
    # https://github.com/pypa/pip/issues/5868
    return sorted(outrows, key=lambda row: tuple(str(x) for x in row))
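

# Example of the coercion at work: a str size and an int size sort without
# a TypeError because every element is compared as a string:
#
#     sorted_outrows([('b', '', ''), ('a', 'sha256=x', 1)])
#     # -> [('a', 'sha256=x', 1), ('b', '', '')]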


def get_csv_rows_for_installed(
    old_csv_rows,  # type: Iterable[List[str]]
    installed,  # type: Dict[str, str]
    changed,  # type: set
    generated,  # type: List[str]
    lib_dir,  # type: str
):
    # type: (...) -> List[InstalledCSVRow]
    """
    :param installed: A map from archive RECORD path to installation RECORD
        path.
    """
    installed_rows = []  # type: List[InstalledCSVRow]
    for row in old_csv_rows:
        if len(row) > 3:
            logger.warning(
                'RECORD line has more than three elements: {}'.format(row)
            )
        # Make a copy because we are mutating the row.
        row = list(row)
        old_path = row[0]
        new_path = installed.pop(old_path, old_path)
        row[0] = new_path
        if new_path in changed:
            digest, length = rehash(new_path)
            row[1] = digest
            row[2] = length
        installed_rows.append(tuple(row))
    for f in generated:
        digest, length = rehash(f)
        installed_rows.append((normpath(f, lib_dir), digest, str(length)))
    for f in installed:
        installed_rows.append((installed[f], '', ''))
    return installed_rows
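

# A small worked example (hypothetical rows; generated=[] so lib_dir is
# unused here). Given
#
#     old_csv_rows = [['pkg/mod.py', 'sha256=abc', '10']]
#     installed    = {'pkg/mod.py': 'pkg/mod.py', 'extra': 'scripts/extra'}
#     changed      = set()
#
# the result keeps the (unchanged) relocated row and appends a hashless row
# for the leftover 'installed' entry:
#
#     [('pkg/mod.py', 'sha256=abc', '10'), ('scripts/extra', '', '')]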


class MissingCallableSuffix(Exception):
    pass


def _raise_for_invalid_entrypoint(specification):
    entry = get_export_entry(specification)
    if entry is not None and entry.suffix is None:
        raise MissingCallableSuffix(str(entry))


class PipScriptMaker(ScriptMaker):
    def make(self, specification, options=None):
        _raise_for_invalid_entrypoint(specification)
        return super(PipScriptMaker, self).make(specification, options)


def install_unpacked_wheel(
    name,  # type: str
    wheeldir,  # type: str
    scheme,  # type: Mapping[str, str]
    req_description,  # type: str
    pycompile=True,  # type: bool
    warn_script_location=True  # type: bool
):
    # type: (...) -> None
    """Install a wheel.

    :param name: Name of the project to install
    :param wheeldir: Base directory of the unpacked wheel
    :param scheme: Distutils scheme dictating the install directories
    :param req_description: String used in place of the requirement, for
        logging
    :param pycompile: Whether to byte-compile installed Python files
    :param warn_script_location: Whether to check that scripts are installed
        into a directory on PATH
    """
    # TODO: Investigate and break this up.
    # TODO: Look into moving this into a dedicated class for representing an
    #       installation.

    if root_is_purelib(name, wheeldir):
        lib_dir = scheme['purelib']
    else:
        lib_dir = scheme['platlib']

    info_dir = []  # type: List[str]
    data_dirs = []
    source = wheeldir.rstrip(os.path.sep) + os.path.sep

    # Record details of the files moved
    #   installed = files copied from the wheel to the destination
    #   changed = files changed while installing (scripts #! line typically)
    #   generated = files newly generated during the install (script wrappers)
    installed = {}  # type: Dict[str, str]
    changed = set()
    generated = []  # type: List[str]

    # Compile all of the pyc files that we're going to be installing
    if pycompile:
        with captured_stdout() as stdout:
            with warnings.catch_warnings():
                warnings.filterwarnings('ignore')
                compileall.compile_dir(source, force=True, quiet=True)
        logger.debug(stdout.getvalue())

    def record_installed(srcfile, destfile, modified=False):
        """Map archive RECORD paths to installation RECORD paths."""
        oldpath = normpath(srcfile, wheeldir)
        newpath = normpath(destfile, lib_dir)
        installed[oldpath] = newpath
        if modified:
            changed.add(destfile)

    def clobber(source, dest, is_base, fixer=None, filter=None):
        ensure_dir(dest)  # common for the 'include' path

        for dir, subdirs, files in os.walk(source):
            basedir = dir[len(source):].lstrip(os.path.sep)
            destdir = os.path.join(dest, basedir)
            if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'):
                continue
            for s in subdirs:
                destsubdir = os.path.join(dest, basedir, s)
                if is_base and basedir == '' and destsubdir.endswith('.data'):
                    data_dirs.append(s)
                    continue
                elif (
                    is_base and
                    s.endswith('.dist-info') and
                    canonicalize_name(s).startswith(canonicalize_name(name))
                ):
                    assert not info_dir, (
                        'Multiple .dist-info directories: {}, '.format(
                            destsubdir
                        ) + ', '.join(info_dir)
                    )
                    info_dir.append(destsubdir)
            for f in files:
                # Skip unwanted files
                if filter and filter(f):
                    continue
                srcfile = os.path.join(dir, f)
                destfile = os.path.join(dest, basedir, f)
                # directory creation is lazy and after the file filtering above
                # to ensure we don't install empty dirs; empty dirs can't be
                # uninstalled.
                ensure_dir(destdir)

                # copyfile (called below) truncates the destination if it
                # exists and then writes the new contents. This is fine in most
                # cases, but can cause a segfault if pip has loaded a shared
                # object (e.g. from pyopenssl through its vendored urllib3)
                # Since the shared object is mmap'd an attempt to call a
                # symbol in it will then cause a segfault. Unlinking the file
                # allows writing of new contents while allowing the process to
                # continue to use the old copy.
                if os.path.exists(destfile):
                    os.unlink(destfile)

                # We use copyfile (not move, copy, or copy2) to be extra sure
                # that we are not moving directories over (copyfile fails for
                # directories) as well as to ensure that we are not copying
                # over any metadata because we want more control over what
                # metadata we actually copy over.
                shutil.copyfile(srcfile, destfile)

                # Copy over the metadata for the file, currently this only
                # includes the atime and mtime.
                st = os.stat(srcfile)
                if hasattr(os, "utime"):
                    os.utime(destfile, (st.st_atime, st.st_mtime))

                # If our file is executable, then make our destination file
                # executable.
                if os.access(srcfile, os.X_OK):
                    st = os.stat(srcfile)
                    permissions = (
                        st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
                    )
                    os.chmod(destfile, permissions)

                changed = False
                if fixer:
                    changed = fixer(destfile)
                record_installed(srcfile, destfile, changed)

    clobber(source, lib_dir, True)

    assert info_dir, "{} .dist-info directory not found".format(
        req_description
    )

    # Get the defined entry points
    ep_file = os.path.join(info_dir[0], 'entry_points.txt')
    console, gui = get_entrypoints(ep_file)

    def is_entrypoint_wrapper(name):
        # EP, EP.exe and EP-script.py are scripts generated for
        # entry point EP by setuptools
        if name.lower().endswith('.exe'):
            matchname = name[:-4]
        elif name.lower().endswith('-script.py'):
            matchname = name[:-10]
        elif name.lower().endswith(".pya"):
            matchname = name[:-4]
        else:
            matchname = name
        # Ignore setuptools-generated scripts
        return (matchname in console or matchname in gui)

    for datadir in data_dirs:
        fixer = None
        filter = None
        for subdir in os.listdir(os.path.join(wheeldir, datadir)):
            fixer = None
            if subdir == 'scripts':
                fixer = fix_script
                filter = is_entrypoint_wrapper
            source = os.path.join(wheeldir, datadir, subdir)
            dest = scheme[subdir]
            clobber(source, dest, False, fixer=fixer, filter=filter)

    maker = PipScriptMaker(None, scheme['scripts'])

    # Ensure old scripts are overwritten.
    # See https://github.com/pypa/pip/issues/1800
    maker.clobber = True

    # Ensure we don't generate any variants for scripts because this is almost
    # never what somebody wants.
    # See https://bitbucket.org/pypa/distlib/issue/35/
    maker.variants = {''}

    # This is required because otherwise distlib creates scripts that are not
    # executable.
    # See https://bitbucket.org/pypa/distlib/issue/32/
    maker.set_mode = True

    scripts_to_generate = []

    # Special case pip and setuptools to generate versioned wrappers
    #
    # The issue is that some projects (specifically, pip and setuptools) use
    # code in setup.py to create "versioned" entry points - pip2.7 on Python
    # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
    # the wheel metadata at build time, and so if the wheel is installed with
    # a *different* version of Python the entry points will be wrong. The
    # correct fix for this is to enhance the metadata to be able to describe
    # such versioned entry points, but that won't happen till Metadata 2.0 is
    # available.
    # In the meantime, projects using versioned entry points will either have
    # incorrect versioned entry points, or they will not be able to distribute
    # "universal" wheels (i.e., they will need a wheel per Python version).
    #
    # Because setuptools and pip are bundled with _ensurepip and virtualenv,
    # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
    # override the versioned entry points in the wheel and generate the
    # correct ones. This code is purely a short-term measure until Metadata 2.0
    # is available.
    #
    # To add to the hackiness of this section of code, in order to support
    # ensurepip this code looks for an ``ENSUREPIP_OPTIONS`` environment
    # variable, which controls which versioned scripts get installed.
    #
    # ENSUREPIP_OPTIONS=altinstall
    #   - Only pipX.Y and easy_install-X.Y will be generated and installed
    # ENSUREPIP_OPTIONS=install
    #   - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note
    #     that this option is technically if ENSUREPIP_OPTIONS is set and is
    #     not altinstall
    # DEFAULT
    #   - The default behavior is to install pip, pipX, pipX.Y, easy_install
    #     and easy_install-X.Y.
    pip_script = console.pop('pip', None)
    if pip_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            scripts_to_generate.append('pip = ' + pip_script)

        if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
            scripts_to_generate.append(
                'pip%s = %s' % (sys.version_info[0], pip_script)
            )

        scripts_to_generate.append(
            'pip%s = %s' % (get_major_minor_version(), pip_script)
        )
        # Delete any other versioned pip entry points
        pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
        for k in pip_ep:
            del console[k]
    easy_install_script = console.pop('easy_install', None)
    if easy_install_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            scripts_to_generate.append(
                'easy_install = ' + easy_install_script
            )

        scripts_to_generate.append(
            'easy_install-%s = %s' % (
                get_major_minor_version(), easy_install_script
            )
        )
        # Delete any other versioned easy_install entry points
        easy_install_ep = [
            k for k in console if re.match(r'easy_install(-\d\.\d)?$', k)
        ]
        for k in easy_install_ep:
            del console[k]

    # Generate the console and GUI entry points specified in the wheel
    scripts_to_generate.extend(
        '%s = %s' % kv for kv in console.items()
    )

    gui_scripts_to_generate = [
        '%s = %s' % kv for kv in gui.items()
    ]

    generated_console_scripts = []  # type: List[str]

    try:
        generated_console_scripts = maker.make_multiple(scripts_to_generate)
        generated.extend(generated_console_scripts)

        generated.extend(
            maker.make_multiple(gui_scripts_to_generate, {'gui': True})
        )
    except MissingCallableSuffix as e:
        entry = e.args[0]
        raise InstallationError(
            "Invalid script entry point: {} for req: {} - A callable "
            "suffix is required. Cf https://packaging.python.org/en/"
            "latest/distributing.html#console-scripts for more "
            "information.".format(entry, req_description)
        )

    if warn_script_location:
        msg = message_about_scripts_not_on_PATH(generated_console_scripts)
        if msg is not None:
            logger.warning(msg)

    # Record pip as the installer
    installer = os.path.join(info_dir[0], 'INSTALLER')
    temp_installer = os.path.join(info_dir[0], 'INSTALLER.pip')
    with open(temp_installer, 'wb') as installer_file:
        installer_file.write(b'pip\n')
    shutil.move(temp_installer, installer)
    generated.append(installer)

    # Record details of all files installed
    record = os.path.join(info_dir[0], 'RECORD')
    temp_record = os.path.join(info_dir[0], 'RECORD.pip')
    with open_for_csv(record, 'r') as record_in:
        with open_for_csv(temp_record, 'w+') as record_out:
            reader = csv.reader(record_in)
            outrows = get_csv_rows_for_installed(
                reader, installed=installed, changed=changed,
                generated=generated, lib_dir=lib_dir,
            )
            writer = csv.writer(record_out)
            # Sort to simplify testing.
            for row in sorted_outrows(outrows):
                writer.writerow(row)
    shutil.move(temp_record, record)


def wheel_version(source_dir):
    # type: (Optional[str]) -> Optional[Tuple[int, ...]]
    """
    Return the Wheel-Version of an extracted wheel, if possible.

    Otherwise, return None if we couldn't parse / extract it.
    """
    try:
        dist = [d for d in pkg_resources.find_on_path(None, source_dir)][0]

        wheel_data = dist.get_metadata('WHEEL')
        wheel_data = Parser().parsestr(wheel_data)

        version = wheel_data['Wheel-Version'].strip()
        version = tuple(map(int, version.split('.')))
        return version
    except Exception:
        return None
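

# Sketch of the metadata this reads: an unpacked wheel's WHEEL file is an
# email-style header block, e.g. (illustrative generator version):
#
#     Wheel-Version: 1.0
#     Generator: bdist_wheel (0.33.6)
#     Root-Is-Purelib: true
#
# for which wheel_version() returns the tuple (1, 0).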


def check_compatibility(version, name):
    # type: (Optional[Tuple[int, ...]], str) -> None
    """
    Raises errors or warns if called with an incompatible Wheel-Version.

    Pip should refuse to install a Wheel-Version that's a major series
    ahead of what it's compatible with (e.g. 2.0 > 1.1); and warn when
    installing a version that is only a minor version ahead (e.g. 1.2 > 1.1).

    version: a 2-tuple representing a Wheel-Version (Major, Minor)
    name: name of wheel or package to raise exception about

    :raises UnsupportedWheel: when an incompatible Wheel-Version is given
    """
    if not version:
        raise UnsupportedWheel(
            "%s is in an unsupported or invalid wheel" % name
        )
    if version[0] > VERSION_COMPATIBLE[0]:
        raise UnsupportedWheel(
            "%s's Wheel-Version (%s) is not compatible with this version "
            "of pip" % (name, '.'.join(map(str, version)))
        )
    elif version > VERSION_COMPATIBLE:
        logger.warning(
            'Installing from a newer Wheel-Version (%s)',
            '.'.join(map(str, version)),
        )
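

# Behavior sketch against VERSION_COMPATIBLE == (1, 0):
#
#     check_compatibility((1, 0), 'foo')   # no-op
#     check_compatibility((1, 9), 'foo')   # warns: newer minor version
#     check_compatibility((2, 0), 'foo')   # raises UnsupportedWheel
#     check_compatibility(None, 'foo')     # raises UnsupportedWheel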


def format_tag(file_tag):
    # type: (Tuple[str, ...]) -> str
    """
    Format three tags in the form "<python_tag>-<abi_tag>-<platform_tag>".

    :param file_tag: A 3-tuple of tags (python_tag, abi_tag, platform_tag).
    """
    return '-'.join(file_tag)


class Wheel(object):
    """A wheel file"""

    # TODO: Maybe move the class into the models sub-package
    # TODO: Maybe move the install code into this class

    wheel_file_re = re.compile(
        r"""^(?P<namever>(?P<name>.+?)-(?P<ver>.*?))
        ((-(?P<build>\d[^-]*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)
        \.whl|\.dist-info)$""",
        re.VERBOSE
    )

    def __init__(self, filename):
        # type: (str) -> None
        """
        :raises InvalidWheelFilename: when the filename is invalid for a wheel
        """
        wheel_info = self.wheel_file_re.match(filename)
        if not wheel_info:
            raise InvalidWheelFilename(
                "%s is not a valid wheel filename." % filename
            )
        self.filename = filename
        self.name = wheel_info.group('name').replace('_', '-')
        # we'll assume "_" means "-" due to wheel naming scheme
        # (https://github.com/pypa/pip/issues/1150)
        self.version = wheel_info.group('ver').replace('_', '-')
        self.build_tag = wheel_info.group('build')
        self.pyversions = wheel_info.group('pyver').split('.')
        self.abis = wheel_info.group('abi').split('.')
        self.plats = wheel_info.group('plat').split('.')

        # All the tag combinations from this file
        self.file_tags = {
            (x, y, z) for x in self.pyversions
            for y in self.abis for z in self.plats
        }

    def get_formatted_file_tags(self):
        # type: () -> List[str]
        """
        Return the wheel's tags as a sorted list of strings.
        """
        return sorted(format_tag(tag) for tag in self.file_tags)

    def support_index_min(self, tags):
        # type: (List[Pep425Tag]) -> int
        """
        Return the lowest index that one of the wheel's file_tag combinations
        achieves in the given list of supported tags.

        For example, if there are 8 supported tags and one of the file tags
        is first in the list, then return 0.

        :param tags: the PEP 425 tags to check the wheel against, in order
            with most preferred first.

        :raises ValueError: If none of the wheel's file tags match one of
            the supported tags.
        """
        return min(tags.index(tag) for tag in self.file_tags if tag in tags)

    def supported(self, tags):
        # type: (List[Pep425Tag]) -> bool
        """
        Return whether the wheel is compatible with one of the given tags.

        :param tags: the PEP 425 tags to check the wheel against.
        """
        return not self.file_tags.isdisjoint(tags)
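

# Parsing example for the filename regex above:
#
#     w = Wheel('pip-19.3-py2.py3-none-any.whl')
#     w.name       # -> 'pip'
#     w.version    # -> '19.3'
#     w.file_tags  # -> {('py2', 'none', 'any'), ('py3', 'none', 'any')}
#     w.get_formatted_file_tags()  # -> ['py2-none-any', 'py3-none-any']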


def _contains_egg_info(
        s, _egg_info_re=re.compile(r'([a-z0-9_.]+)-([a-z0-9_.!+-]+)', re.I)):
    """Determine whether the string looks like an egg_info.

    :param s: The string to parse. E.g. foo-2.1
    """
    return bool(_egg_info_re.search(s))
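

# Examples of the heuristic above:
#
#     _contains_egg_info('simple-0.1.0')  # -> True (name-version pattern)
#     _contains_egg_info('master')        # -> False (no version component)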


def should_use_ephemeral_cache(
    req,  # type: InstallRequirement
    should_unpack,  # type: bool
    cache_available,  # type: bool
    check_binary_allowed,  # type: BinaryAllowedPredicate
):
    # type: (...) -> Optional[bool]
    """
    Return whether to build an InstallRequirement object using the
    ephemeral cache.

    :param cache_available: whether a cache directory is available for the
        should_unpack=True case.

    :return: True or False to build the requirement with ephem_cache=True
        or False, respectively; or None not to build the requirement.
    """
    if req.constraint:
        # never build requirements that are merely constraints
        return None
    if req.is_wheel:
        if not should_unpack:
            logger.info(
                'Skipping %s, due to already being a wheel.', req.name,
            )
            return None
    if not should_unpack:
        # i.e. pip wheel, not pip install;
        # return False, knowing that the caller will never cache
        # in this case anyway, so this return merely means "build it".
        # TODO improve this behavior
        return False

    if req.editable or not req.source_dir:
        return None

    if not check_binary_allowed(req):
        logger.info(
            "Skipping wheel build for %s, due to binaries "
            "being disabled for it.", req.name,
        )
        return None

    if req.link and req.link.is_vcs:
        # VCS checkout. Build wheel just for this run.
        return True

    link = req.link
    base, ext = link.splitext()
    if cache_available and _contains_egg_info(base):
        return False

    # Otherwise, build the wheel just for this run using the ephemeral
    # cache since we are either in the case of e.g. a local directory, or
    # no cache directory is available to use.
    return True


def format_command_result(
    command_args,  # type: List[str]
    command_output,  # type: str
):
    # type: (...) -> str
    """
    Format command information for logging.
    """
    command_desc = format_command_args(command_args)
    text = 'Command arguments: {}\n'.format(command_desc)

    if not command_output:
        text += 'Command output: None'
    elif logger.getEffectiveLevel() > logging.DEBUG:
        text += 'Command output: [use --verbose to show]'
    else:
        if not command_output.endswith('\n'):
            command_output += '\n'
        text += 'Command output:\n{}{}'.format(command_output, LOG_DIVIDER)

    return text


def get_legacy_build_wheel_path(
    names,  # type: List[str]
    temp_dir,  # type: str
    req,  # type: InstallRequirement
    command_args,  # type: List[str]
    command_output,  # type: str
):
    # type: (...) -> Optional[str]
    """
    Return the path to the wheel in the temporary build directory.
    """
    # Sort for determinism.
    names = sorted(names)
    if not names:
        msg = (
            'Legacy build of wheel for {!r} created no files.\n'
        ).format(req.name)
        msg += format_command_result(command_args, command_output)
        logger.warning(msg)
        return None

    if len(names) > 1:
        msg = (
            'Legacy build of wheel for {!r} created more than one file.\n'
            'Filenames (choosing first): {}\n'
        ).format(req.name, names)
        msg += format_command_result(command_args, command_output)
        logger.warning(msg)

    return os.path.join(temp_dir, names[0])


def _always_true(_):
    return True


class WheelBuilder(object):
    """Build wheels from a RequirementSet."""

    def __init__(
        self,
        preparer,  # type: RequirementPreparer
        wheel_cache,  # type: WheelCache
        build_options=None,  # type: Optional[List[str]]
        global_options=None,  # type: Optional[List[str]]
        check_binary_allowed=None,  # type: Optional[BinaryAllowedPredicate]
        no_clean=False,  # type: bool
        path_to_wheelnames=None,  # type: Optional[Union[bytes, Text]]
    ):
        # type: (...) -> None
        if check_binary_allowed is None:
            # Binaries allowed by default.
            check_binary_allowed = _always_true

        self.preparer = preparer
        self.wheel_cache = wheel_cache

        self._wheel_dir = preparer.wheel_download_dir

        self.build_options = build_options or []
        self.global_options = global_options or []
        self.check_binary_allowed = check_binary_allowed
        self.no_clean = no_clean
        # path where to save the names of built wheels
        self.path_to_wheelnames = path_to_wheelnames
        # file names of the built wheels
        self.wheel_filenames = []  # type: List[Union[bytes, Text]]

    def _build_one(self, req, output_dir, python_tag=None):
        """Build one wheel.

        :return: The filename of the built wheel, or None if the build failed.
        """
        # Install build deps into temporary directory (PEP 518)
        with req.build_env:
            return self._build_one_inside_env(req, output_dir,
                                              python_tag=python_tag)

    def _build_one_inside_env(self, req, output_dir, python_tag=None):
        with TempDirectory(kind="wheel") as temp_dir:
            if req.use_pep517:
                builder = self._build_one_pep517
            else:
                builder = self._build_one_legacy
            wheel_path = builder(req, temp_dir.path, python_tag=python_tag)
            if wheel_path is not None:
                wheel_name = os.path.basename(wheel_path)
                dest_path = os.path.join(output_dir, wheel_name)
                try:
                    wheel_hash, length = hash_file(wheel_path)
                    shutil.move(wheel_path, dest_path)
                    logger.info('Created wheel for %s: '
                                'filename=%s size=%d sha256=%s',
                                req.name, wheel_name, length,
                                wheel_hash.hexdigest())
                    logger.info('Stored in directory: %s', output_dir)
                    return dest_path
                except Exception:
                    pass
        # Ignore return, we can't do anything else useful.
        self._clean_one(req)
        return None

    def _build_one_pep517(self, req, tempd, python_tag=None):
        """Build one InstallRequirement using the PEP 517 build process.

        Returns path to wheel if successfully built. Otherwise, returns None.
        """
        assert req.metadata_directory is not None
        if self.build_options:
            # PEP 517 does not support --build-options
            logger.error('Cannot build wheel for %s using PEP 517 when '
                         '--build-options is present' % (req.name,))
            return None
        try:
            logger.debug('Destination directory: %s', tempd)

            runner = runner_with_spinner_message(
                'Building wheel for {} (PEP 517)'.format(req.name)
            )
            backend = req.pep517_backend
            with backend.subprocess_runner(runner):
                wheel_name = backend.build_wheel(
                    tempd,
                    metadata_directory=req.metadata_directory,
                )
            if python_tag:
                # General PEP 517 backends don't necessarily support
                # a "--python-tag" option, so we rename the wheel
                # file directly.
                new_name = replace_python_tag(wheel_name, python_tag)
                os.rename(
                    os.path.join(tempd, wheel_name),
                    os.path.join(tempd, new_name)
                )
                # Reassign to simplify the return at the end of function
                wheel_name = new_name
        except Exception:
            logger.error('Failed building wheel for %s', req.name)
            return None
        return os.path.join(tempd, wheel_name)

    def _build_one_legacy(self, req, tempd, python_tag=None):
        """Build one InstallRequirement using the "legacy" build process.

        Returns path to wheel if successfully built. Otherwise, returns None.
        """
        wheel_args = make_setuptools_bdist_wheel_args(
            req.setup_py_path,
            global_options=self.global_options,
            build_options=self.build_options,
            destination_dir=tempd,
            python_tag=python_tag,
        )

        spin_message = 'Building wheel for %s (setup.py)' % (req.name,)
        with open_spinner(spin_message) as spinner:
            logger.debug('Destination directory: %s', tempd)

            try:
                output = call_subprocess(
                    wheel_args,
                    cwd=req.unpacked_source_directory,
                    spinner=spinner,
                )
            except Exception:
                spinner.finish("error")
                logger.error('Failed building wheel for %s', req.name)
                return None

        names = os.listdir(tempd)
        wheel_path = get_legacy_build_wheel_path(
            names=names,
            temp_dir=tempd,
            req=req,
            command_args=wheel_args,
            command_output=output,
        )
        return wheel_path

    def _clean_one(self, req):
        clean_args = make_setuptools_clean_args(
            req.setup_py_path,
            global_options=self.global_options,
        )

        logger.info('Running setup.py clean for %s', req.name)
        try:
            call_subprocess(clean_args, cwd=req.source_dir)
            return True
        except Exception:
            logger.error('Failed cleaning build dir for %s', req.name)
            return False

    def build(
        self,
        requirements,  # type: Iterable[InstallRequirement]
        should_unpack=False  # type: bool
    ):
        # type: (...) -> List[InstallRequirement]
        """Build wheels.

        :param should_unpack: If True, after building the wheel, unpack it
            and replace the sdist with the unpacked version in preparation
            for installation.
        :return: The list of requirements that failed to build.
        """
        # pip install uses should_unpack=True.
        # pip install never provides a _wheel_dir.
        # pip wheel uses should_unpack=False.
        # pip wheel always provides a _wheel_dir (via the preparer).
        assert (
            (should_unpack and not self._wheel_dir) or
            (not should_unpack and self._wheel_dir)
        )

        buildset = []
        cache_available = bool(self.wheel_cache.cache_dir)

        for req in requirements:
            ephem_cache = should_use_ephemeral_cache(
                req,
                should_unpack=should_unpack,
                cache_available=cache_available,
                check_binary_allowed=self.check_binary_allowed,
            )
            if ephem_cache is None:
                continue

            # Determine where the wheel should go.
            if should_unpack:
                if ephem_cache:
                    output_dir = self.wheel_cache.get_ephem_path_for_link(
                        req.link
                    )
                else:
                    output_dir = self.wheel_cache.get_path_for_link(req.link)
            else:
                output_dir = self._wheel_dir

            buildset.append((req, output_dir))

        if not buildset:
            return []

        # TODO by @pradyunsg
        # Should break up this method into 2 separate methods.

        # Build the wheels.
        logger.info(
            'Building wheels for collected packages: %s',
            ', '.join([req.name for (req, _) in buildset]),
        )

        python_tag = None
        if should_unpack:
            python_tag = pep425tags.implementation_tag

        with indent_log():
            build_success, build_failure = [], []
            for req, output_dir in buildset:
                try:
                    ensure_dir(output_dir)
                except OSError as e:
                    logger.warning(
                        "Building wheel for %s failed: %s",
                        req.name, e,
                    )
                    build_failure.append(req)
                    continue

                wheel_file = self._build_one(
                    req, output_dir,
                    python_tag=python_tag,
                )
                if wheel_file:
                    build_success.append(req)
                    self.wheel_filenames.append(
                        os.path.relpath(wheel_file, output_dir)
                    )
                    if should_unpack:
                        # XXX: This is mildly duplicative with prepare_files,
                        # but not close enough to pull out to a single common
                        # method.
                        # The code below assumes temporary source dirs -
                        # prevent it doing bad things.
                        if (
                            req.source_dir and
                            not has_delete_marker_file(req.source_dir)
                        ):
                            raise AssertionError(
                                "bad source dir - missing marker")
                        # Delete the source we built the wheel from
                        req.remove_temporary_source()
                        # set the build directory again - name is known from
                        # the work prepare_files did.
                        req.source_dir = req.ensure_build_location(
                            self.preparer.build_dir
                        )
                        # Update the link for this.
                        req.link = Link(path_to_url(wheel_file))
                        assert req.link.is_wheel
                        # extract the wheel into the dir
                        unpack_file(req.link.file_path, req.source_dir)
                else:
                    build_failure.append(req)

        # notify success/failure
        if build_success:
            logger.info(
                'Successfully built %s',
                ' '.join([req.name for req in build_success]),
            )
        if build_failure:
            logger.info(
                'Failed to build %s',
                ' '.join([req.name for req in build_failure]),
            )
        # Return a list of requirements that failed to build
        return build_failure