2014-08-31 01:52:28 +02:00
|
|
|
from __future__ import absolute_import
|
|
|
|
|
|
|
|
import logging
|
2014-01-12 07:50:07 +01:00
|
|
|
import os
|
|
|
|
import shutil
|
|
|
|
import sys
|
2017-03-18 18:32:10 +01:00
|
|
|
import sysconfig
|
2014-01-12 07:50:07 +01:00
|
|
|
import zipfile
|
|
|
|
from distutils.util import change_root
|
|
|
|
|
2018-08-05 15:23:41 +02:00
|
|
|
from pip._vendor import pkg_resources, six
|
2018-08-21 17:11:36 +02:00
|
|
|
from pip._vendor.packaging.requirements import Requirement
|
2016-02-09 23:58:08 +01:00
|
|
|
from pip._vendor.packaging.utils import canonicalize_name
|
2017-05-16 12:16:30 +02:00
|
|
|
from pip._vendor.packaging.version import Version
|
2018-06-25 13:53:15 +02:00
|
|
|
from pip._vendor.packaging.version import parse as parse_version
|
2018-08-01 12:34:11 +02:00
|
|
|
from pip._vendor.pep517.wrappers import Pep517HookCaller
|
2014-09-12 00:40:45 +02:00
|
|
|
|
2017-08-31 17:48:18 +02:00
|
|
|
from pip._internal import wheel
|
2018-04-19 09:45:30 +02:00
|
|
|
from pip._internal.build_env import NoOpBuildEnvironment
|
2018-05-30 09:19:05 +02:00
|
|
|
from pip._internal.exceptions import InstallationError
|
2017-08-31 17:48:18 +02:00
|
|
|
from pip._internal.locations import (
|
2017-11-21 08:50:32 +01:00
|
|
|
PIP_DELETE_MARKER_FILENAME, running_under_virtualenv,
|
2017-08-31 17:48:18 +02:00
|
|
|
)
|
2018-07-24 04:39:17 +02:00
|
|
|
from pip._internal.models.link import Link
|
2019-01-26 15:15:23 +01:00
|
|
|
from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path
|
2017-08-31 17:48:18 +02:00
|
|
|
from pip._internal.req.req_uninstall import UninstallPathSet
|
2018-07-29 13:11:37 +02:00
|
|
|
from pip._internal.utils.compat import native_str
|
2017-08-31 17:48:18 +02:00
|
|
|
from pip._internal.utils.hashes import Hashes
|
|
|
|
from pip._internal.utils.logging import indent_log
|
|
|
|
from pip._internal.utils.misc import (
|
2017-05-16 12:16:30 +02:00
|
|
|
_make_build_dir, ask_path_exists, backup_dir, call_subprocess,
|
|
|
|
display_path, dist_in_site_packages, dist_in_usersite, ensure_dir,
|
2018-10-19 11:06:10 +02:00
|
|
|
get_installed_version, redact_password_from_url, rmtree,
|
2014-01-27 15:07:10 +01:00
|
|
|
)
|
2018-07-31 21:59:34 +02:00
|
|
|
from pip._internal.utils.packaging import get_metadata
|
2017-08-31 17:48:18 +02:00
|
|
|
from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM
|
|
|
|
from pip._internal.utils.temp_dir import TempDirectory
|
2018-12-17 12:13:00 +01:00
|
|
|
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
|
2017-08-31 17:48:18 +02:00
|
|
|
from pip._internal.utils.ui import open_spinner
|
|
|
|
from pip._internal.vcs import vcs
|
2018-08-21 17:07:40 +02:00
|
|
|
from pip._internal.wheel import move_wheel_files
|
2014-01-12 01:50:11 +01:00
|
|
|
|
2018-12-17 12:13:00 +01:00
|
|
|
if MYPY_CHECK_RUNNING:
|
2019-02-22 12:17:07 +01:00
|
|
|
from typing import (
|
2019-02-25 12:24:55 +01:00
|
|
|
Any, Dict, Iterable, List, Mapping, Optional, Sequence, Union,
|
2018-12-17 12:13:00 +01:00
|
|
|
)
|
2019-02-22 12:17:07 +01:00
|
|
|
from pip._internal.build_env import BuildEnvironment
|
|
|
|
from pip._internal.cache import WheelCache
|
|
|
|
from pip._internal.index import PackageFinder
|
|
|
|
from pip._vendor.pkg_resources import Distribution
|
|
|
|
from pip._vendor.packaging.specifiers import SpecifierSet
|
|
|
|
from pip._vendor.packaging.markers import Marker
|
2018-12-17 12:13:00 +01:00
|
|
|
|
|
|
|
|
2014-08-31 01:52:28 +02:00
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
2015-05-14 18:24:49 +02:00
|
|
|
|
2014-01-12 01:50:11 +01:00
|
|
|
class InstallRequirement(object):
|
2017-12-16 19:13:46 +01:00
|
|
|
"""
|
|
|
|
Represents something that may be installed later on, may have information
|
2019-03-12 20:25:09 +01:00
|
|
|
about where to fetch the relevant requirement and also contains logic for
|
2017-12-16 19:13:46 +01:00
|
|
|
installing the said requirement.
|
|
|
|
"""
|
2014-01-12 01:50:11 +01:00
|
|
|
|
2018-12-17 12:13:00 +01:00
|
|
|
def __init__(
    self,
    req,  # type: Optional[Requirement]
    comes_from,  # type: Optional[Union[str, InstallRequirement]]
    source_dir=None,  # type: Optional[str]
    editable=False,  # type: bool
    link=None,  # type: Optional[Link]
    update=True,  # type: bool
    markers=None,  # type: Optional[Marker]
    use_pep517=None,  # type: Optional[bool]
    isolated=False,  # type: bool
    options=None,  # type: Optional[Dict[str, Any]]
    wheel_cache=None,  # type: Optional[WheelCache]
    constraint=False,  # type: bool
    extras=()  # type: Iterable[str]
):
    # type: (...) -> None
    """Record the requirement's identity, origin and mutable install state.

    :param req: the parsed requirement, or None when only a link/path is
        known at this point (the name is discovered later via egg_info).
    :param comes_from: what pulled this requirement in — a parent
        InstallRequirement or a free-form string (e.g. a requirements
        file path); used only for display.
    :param markers: explicit environment markers; when None they are
        taken from ``req.marker`` below.
    """
    assert req is None or isinstance(req, Requirement), req
    self.req = req
    self.comes_from = comes_from
    self.constraint = constraint
    if source_dir is not None:
        self.source_dir = os.path.normpath(os.path.abspath(source_dir))
    else:
        self.source_dir = None
    self.editable = editable

    self._wheel_cache = wheel_cache
    if link is None and req and req.url:
        # PEP 508 URL requirement
        link = Link(req.url)
    # original_link is kept so hash checking can distinguish hashes from
    # the requirements file from ones picked up later (e.g. wheel cache).
    self.link = self.original_link = link

    if extras:
        self.extras = extras
    elif req:
        self.extras = {
            pkg_resources.safe_extra(extra) for extra in req.extras
        }
    else:
        self.extras = set()
    if markers is None and req:
        markers = req.marker
    self.markers = markers

    # Cached location of the egg-info directory, filled in lazily.
    self._egg_info_path = None  # type: Optional[str]
    # This holds the pkg_resources.Distribution object if this requirement
    # is already available:
    self.satisfied_by = None
    # This holds the pkg_resources.Distribution object if this requirement
    # conflicts with another installed distribution:
    self.conflicts_with = None
    # Temporary build location
    self._temp_build_dir = TempDirectory(kind="req-build")
    # Used to store the global directory where the _temp_build_dir should
    # have been created. Cf _correct_build_location method.
    self._ideal_build_dir = None  # type: Optional[str]
    # True if the editable should be updated:
    self.update = update
    # Set to True after successful installation
    self.install_succeeded = None  # type: Optional[bool]
    # UninstallPathSet of uninstalled distribution (for possible rollback)
    self.uninstalled_pathset = None
    self.options = options if options else {}
    # Set to True after successful preparation of this requirement
    self.prepared = False
    # True when the requirement was requested directly by the user
    # (as opposed to being discovered as a dependency).
    self.is_direct = False

    self.isolated = isolated
    self.build_env = NoOpBuildEnvironment()  # type: BuildEnvironment

    # For PEP 517, the directory where we request the project metadata
    # gets stored. We need this to pass to build_wheel, so the backend
    # can ensure that the wheel matches the metadata (see the PEP for
    # details).
    self.metadata_directory = None  # type: Optional[str]

    # The static build requirements (from pyproject.toml)
    self.pyproject_requires = None  # type: Optional[List[str]]

    # Build requirements that we will check are available
    self.requirements_to_check = []  # type: List[str]

    # The PEP 517 backend we should use to build the project
    self.pep517_backend = None  # type: Optional[Pep517HookCaller]

    # Are we using PEP 517 for this requirement?
    # After pyproject.toml has been loaded, the only valid values are True
    # and False. Before loading, None is valid (meaning "use the default").
    # Setting an explicit value before loading pyproject.toml is supported,
    # but after loading this flag should be treated as read only.
    self.use_pep517 = use_pep517
|
2018-07-26 15:41:50 +02:00
|
|
|
|
2014-01-12 01:50:11 +01:00
|
|
|
def __str__(self):
    # type: () -> str
    """Human-readable description: requirement, link, location, origin."""
    parts = []
    if self.req:
        parts.append(str(self.req))
        if self.link:
            parts.append(
                ' from %s' % redact_password_from_url(self.link.url))
    elif self.link:
        parts.append(redact_password_from_url(self.link.url))
    else:
        parts.append('<InstallRequirement>')
    if self.satisfied_by is not None:
        parts.append(' in %s' % display_path(self.satisfied_by.location))
    if self.comes_from:
        if isinstance(self.comes_from, six.string_types):
            origin = self.comes_from
        else:
            origin = self.comes_from.from_path()
        if origin:
            parts.append(' (from %s)' % origin)
    return ''.join(parts)
|
|
|
|
|
2015-03-06 16:58:34 +01:00
|
|
|
def __repr__(self):
    # type: () -> str
    """Debug representation showing the class, str() form and editability."""
    template = '<%s object: %s editable=%r>'
    return template % (self.__class__.__name__, str(self), self.editable)
|
2015-03-06 16:58:34 +01:00
|
|
|
|
Fix false hash mismatches when installing a package that has a cached wheel.
This would occur when, for example, installing from a requirements file that references a certain hashed sdist, a common situation.
As of pip 7, pip always tries to build a wheel for each requirement (if one wasn't provided directly) and installs from that. The way this was implemented, InstallRequirement.link pointed to the cached wheel, which obviously had a different hash than the index-sourced archive, so spurious mismatch errors would result.
Now we no longer read from the wheel cache in hash-checking mode.
Make populate_link(), rather than the `link` setter, responsible for mapping InstallRequirement.link to a cached wheel. populate_link() isn't called until until prepare_files(). At that point, when we've examined all InstallRequirements and their potential --hash options, we know whether we should be requiring hashes and thus whether to use the wheel cache at all.
The only place that sets InstallRequirement.link other than InstallRequirement itself is pip.wheel, which does so long after hashes have been checked, when it's unpacking the wheel it just built, so it won't cause spurious hash mismatches.
2015-10-16 21:58:59 +02:00
|
|
|
def populate_link(self, finder, upgrade, require_hashes):
|
2018-12-17 12:13:00 +01:00
|
|
|
# type: (PackageFinder, bool, bool) -> None
|
2015-03-20 00:08:42 +01:00
|
|
|
"""Ensure that if a link can be found for this, that it is found.
|
|
|
|
|
|
|
|
Note that self.link may still be None - if Upgrade is False and the
|
|
|
|
requirement is already installed.
|
Fix false hash mismatches when installing a package that has a cached wheel.
This would occur when, for example, installing from a requirements file that references a certain hashed sdist, a common situation.
As of pip 7, pip always tries to build a wheel for each requirement (if one wasn't provided directly) and installs from that. The way this was implemented, InstallRequirement.link pointed to the cached wheel, which obviously had a different hash than the index-sourced archive, so spurious mismatch errors would result.
Now we no longer read from the wheel cache in hash-checking mode.
Make populate_link(), rather than the `link` setter, responsible for mapping InstallRequirement.link to a cached wheel. populate_link() isn't called until until prepare_files(). At that point, when we've examined all InstallRequirements and their potential --hash options, we know whether we should be requiring hashes and thus whether to use the wheel cache at all.
The only place that sets InstallRequirement.link other than InstallRequirement itself is pip.wheel, which does so long after hashes have been checked, when it's unpacking the wheel it just built, so it won't cause spurious hash mismatches.
2015-10-16 21:58:59 +02:00
|
|
|
|
|
|
|
If require_hashes is True, don't use the wheel cache, because cached
|
|
|
|
wheels, always built locally, have different hashes than the files
|
|
|
|
downloaded from the index server and thus throw false hash mismatches.
|
|
|
|
Furthermore, cached wheels at present have undeterministic contents due
|
|
|
|
to file modification times.
|
2015-03-20 00:08:42 +01:00
|
|
|
"""
|
|
|
|
if self.link is None:
|
|
|
|
self.link = finder.find_requirement(self, upgrade)
|
Fix false hash mismatches when installing a package that has a cached wheel.
This would occur when, for example, installing from a requirements file that references a certain hashed sdist, a common situation.
As of pip 7, pip always tries to build a wheel for each requirement (if one wasn't provided directly) and installs from that. The way this was implemented, InstallRequirement.link pointed to the cached wheel, which obviously had a different hash than the index-sourced archive, so spurious mismatch errors would result.
Now we no longer read from the wheel cache in hash-checking mode.
Make populate_link(), rather than the `link` setter, responsible for mapping InstallRequirement.link to a cached wheel. populate_link() isn't called until until prepare_files(). At that point, when we've examined all InstallRequirements and their potential --hash options, we know whether we should be requiring hashes and thus whether to use the wheel cache at all.
The only place that sets InstallRequirement.link other than InstallRequirement itself is pip.wheel, which does so long after hashes have been checked, when it's unpacking the wheel it just built, so it won't cause spurious hash mismatches.
2015-10-16 21:58:59 +02:00
|
|
|
if self._wheel_cache is not None and not require_hashes:
|
|
|
|
old_link = self.link
|
2017-07-20 20:59:44 +02:00
|
|
|
self.link = self._wheel_cache.get(self.link, self.name)
|
Fix false hash mismatches when installing a package that has a cached wheel.
This would occur when, for example, installing from a requirements file that references a certain hashed sdist, a common situation.
As of pip 7, pip always tries to build a wheel for each requirement (if one wasn't provided directly) and installs from that. The way this was implemented, InstallRequirement.link pointed to the cached wheel, which obviously had a different hash than the index-sourced archive, so spurious mismatch errors would result.
Now we no longer read from the wheel cache in hash-checking mode.
Make populate_link(), rather than the `link` setter, responsible for mapping InstallRequirement.link to a cached wheel. populate_link() isn't called until until prepare_files(). At that point, when we've examined all InstallRequirements and their potential --hash options, we know whether we should be requiring hashes and thus whether to use the wheel cache at all.
The only place that sets InstallRequirement.link other than InstallRequirement itself is pip.wheel, which does so long after hashes have been checked, when it's unpacking the wheel it just built, so it won't cause spurious hash mismatches.
2015-10-16 21:58:59 +02:00
|
|
|
if old_link != self.link:
|
|
|
|
logger.debug('Using cached wheel link: %s', self.link)
|
2015-03-30 23:44:02 +02:00
|
|
|
|
2018-05-29 22:37:19 +02:00
|
|
|
# Things that are valid for all kinds of requirements?
|
|
|
|
@property
def name(self):
    # type: () -> Optional[str]
    """Normalized project name, or None when the requirement is unnamed."""
    req = self.req
    if req is None:
        return None
    safe = pkg_resources.safe_name(req.name)
    return native_str(safe)
|
|
|
|
|
2014-07-04 00:56:26 +02:00
|
|
|
@property
|
|
|
|
def specifier(self):
|
2018-12-17 12:13:00 +01:00
|
|
|
# type: () -> SpecifierSet
|
2014-07-04 00:56:26 +02:00
|
|
|
return self.req.specifier
|
|
|
|
|
Add checks against requirements-file-dwelling hashes for most kinds of packages. Close #1175.
* Add --require-hashes option. This is handy in deployment scripts to force application authors to hash their requirements. It is also a convenient way to get pip to show computed hashes for a virgin, unhashed requirements file. Eventually, additions to `pip freeze` should fill a superset of this use case.
* In --require-hashes mode, at least one hash is required to match for each requirement.
* Option-based requirements (--sha256=...) turn on --require-hashes mode implicitly.
* Internet-derived URL-based hashes are "necessary but not sufficient": they do not satisfy --require-hashes mode when they match, but they are still used to guard against transmission errors.
* Other URL-based requirements (#md5=...) are treated just like flag-based ones, except they don't turn on --require-hashes.
* Complain informatively, with the most devastating errors first so you don't chase your tail all day only to run up against a brick wall at the end. This also means we don't complain that a hash is missing, only for the user to find, after fixing it, that we have no idea how to even compute a hash for that type of requirement.
* Complain about unpinned requirements when hash-checking mode is on, lest they cause the user surprise later.
* Complain about missing hashes.
* Complain about requirement types we don't know how to hash (like VCS ones and local dirs).
* Have InstallRequirement keep its original Link around (original_link) so we can differentiate between URL hashes from requirements files and ones downloaded from the (untrustworthy) internet.
* Remove test_download_hashes, which is obsolete. Similar coverage is provided in test_utils.TestHashes and the various hash cases in test_req.py.
2015-09-09 19:01:53 +02:00
|
|
|
@property
|
|
|
|
def is_pinned(self):
|
2018-12-17 12:13:00 +01:00
|
|
|
# type: () -> bool
|
Add checks against requirements-file-dwelling hashes for most kinds of packages. Close #1175.
* Add --require-hashes option. This is handy in deployment scripts to force application authors to hash their requirements. It is also a convenient way to get pip to show computed hashes for a virgin, unhashed requirements file. Eventually, additions to `pip freeze` should fill a superset of this use case.
* In --require-hashes mode, at least one hash is required to match for each requirement.
* Option-based requirements (--sha256=...) turn on --require-hashes mode implicitly.
* Internet-derived URL-based hashes are "necessary but not sufficient": they do not satisfy --require-hashes mode when they match, but they are still used to guard against transmission errors.
* Other URL-based requirements (#md5=...) are treated just like flag-based ones, except they don't turn on --require-hashes.
* Complain informatively, with the most devastating errors first so you don't chase your tail all day only to run up against a brick wall at the end. This also means we don't complain that a hash is missing, only for the user to find, after fixing it, that we have no idea how to even compute a hash for that type of requirement.
* Complain about unpinned requirements when hash-checking mode is on, lest they cause the user surprise later.
* Complain about missing hashes.
* Complain about requirement types we don't know how to hash (like VCS ones and local dirs).
* Have InstallRequirement keep its original Link around (original_link) so we can differentiate between URL hashes from requirements files and ones downloaded from the (untrustworthy) internet.
* Remove test_download_hashes, which is obsolete. Similar coverage is provided in test_utils.TestHashes and the various hash cases in test_req.py.
2015-09-09 19:01:53 +02:00
|
|
|
"""Return whether I am pinned to an exact version.
|
|
|
|
|
|
|
|
For example, some-package==1.2 is pinned; some-package>1.2 is not.
|
|
|
|
"""
|
|
|
|
specifiers = self.specifier
|
2015-10-11 16:13:13 +02:00
|
|
|
return (len(specifiers) == 1 and
|
2017-03-18 19:26:30 +01:00
|
|
|
next(iter(specifiers)).operator in {'==', '==='})
|
Add checks against requirements-file-dwelling hashes for most kinds of packages. Close #1175.
* Add --require-hashes option. This is handy in deployment scripts to force application authors to hash their requirements. It is also a convenient way to get pip to show computed hashes for a virgin, unhashed requirements file. Eventually, additions to `pip freeze` should fill a superset of this use case.
* In --require-hashes mode, at least one hash is required to match for each requirement.
* Option-based requirements (--sha256=...) turn on --require-hashes mode implicitly.
* Internet-derived URL-based hashes are "necessary but not sufficient": they do not satisfy --require-hashes mode when they match, but they are still used to guard against transmission errors.
* Other URL-based requirements (#md5=...) are treated just like flag-based ones, except they don't turn on --require-hashes.
* Complain informatively, with the most devastating errors first so you don't chase your tail all day only to run up against a brick wall at the end. This also means we don't complain that a hash is missing, only for the user to find, after fixing it, that we have no idea how to even compute a hash for that type of requirement.
* Complain about unpinned requirements when hash-checking mode is on, lest they cause the user surprise later.
* Complain about missing hashes.
* Complain about requirement types we don't know how to hash (like VCS ones and local dirs).
* Have InstallRequirement keep its original Link around (original_link) so we can differentiate between URL hashes from requirements files and ones downloaded from the (untrustworthy) internet.
* Remove test_download_hashes, which is obsolete. Similar coverage is provided in test_utils.TestHashes and the various hash cases in test_req.py.
2015-09-09 19:01:53 +02:00
|
|
|
|
2018-05-29 22:37:19 +02:00
|
|
|
@property
def installed_version(self):
    # type: () -> Optional[str]
    """Version string of the already-installed distribution, if any."""
    project = self.name
    return get_installed_version(project)
|
|
|
|
|
|
|
|
def match_markers(self, extras_requested=None):
|
2018-12-17 12:13:00 +01:00
|
|
|
# type: (Optional[Iterable[str]]) -> bool
|
2018-05-29 22:37:19 +02:00
|
|
|
if not extras_requested:
|
|
|
|
# Provide an extra to safely evaluate the markers
|
|
|
|
# without matching any extra
|
|
|
|
extras_requested = ('',)
|
|
|
|
if self.markers is not None:
|
|
|
|
return any(
|
|
|
|
self.markers.evaluate({'extra': extra})
|
|
|
|
for extra in extras_requested)
|
|
|
|
else:
|
|
|
|
return True
|
|
|
|
|
|
|
|
@property
|
|
|
|
def has_hash_options(self):
|
2018-12-17 12:13:00 +01:00
|
|
|
# type: () -> bool
|
2018-05-29 22:37:19 +02:00
|
|
|
"""Return whether any known-good hashes are specified as options.
|
|
|
|
|
|
|
|
These activate --require-hashes mode; hashes specified as part of a
|
|
|
|
URL do not.
|
|
|
|
|
|
|
|
"""
|
|
|
|
return bool(self.options.get('hashes', {}))
|
|
|
|
|
|
|
|
def hashes(self, trust_internet=True):
    # type: (bool) -> Hashes
    """Return a hash-comparer that considers my option- and URL-based
    hashes to be known-good.

    Hashes embedded in the requirements-file URL are almost peers with
    option-based ones: they satisfy --require-hashes (implicit or
    explicit) but do not activate it. md5 and sha224 are not allowed in
    flags, nudging people toward good algorithms. All hashes are OR-ed
    together, including URL-based ones.

    :param trust_internet: Whether to trust URL-based (#md5=...) hashes
        downloaded from the internet, as by populate_link()
    """
    known = dict(self.options.get('hashes', {}))
    if trust_internet:
        link = self.link
    else:
        link = self.original_link
    if link and link.hash:
        known.setdefault(link.hash_name, []).append(link.hash)
    return Hashes(known)
|
|
|
|
|
2014-01-12 01:50:11 +01:00
|
|
|
def from_path(self):
    # type: () -> Optional[str]
    """Format a nice indicator to show where this "comes from",
    e.g. "pkg->parent-requirement".
    """
    if self.req is None:
        return None
    chain = [str(self.req)]
    origin = self.comes_from
    if origin:
        if not isinstance(origin, six.string_types):
            origin = origin.from_path()
        if origin:
            chain.append(origin)
    return '->'.join(chain)
|
|
|
|
|
2014-12-17 22:39:14 +01:00
|
|
|
def build_location(self, build_dir):
    # type: (str) -> Optional[str]
    """Return the directory this requirement should be built in.

    Reuses an already-created temporary build dir; otherwise, for
    unnamed requirements, creates a fresh temp dir (recording
    ``build_dir`` as the ideal location to move to later), and for named
    requirements returns ``build_dir/<name>``.
    """
    assert build_dir is not None
    # A temp build dir was already allocated for this requirement.
    if self._temp_build_dir.path is not None:
        return self._temp_build_dir.path
    if self.req is None:
        # for requirement via a path to a directory: the name of the
        # package is not available yet so we create a temp directory
        # Once run_egg_info will have run, we'll be able
        # to fix it via _correct_build_location
        # Some systems have /tmp as a symlink which confuses custom
        # builds (such as numpy). Thus, we ensure that the real path
        # is returned.
        self._temp_build_dir.create()
        self._ideal_build_dir = build_dir

        return self._temp_build_dir.path
    if self.editable:
        # Editable checkouts use a lowercased directory name.
        name = self.name.lower()
    else:
        name = self.name
    # FIXME: Is there a better place to create the build_dir? (hg and bzr
    # need this)
    if not os.path.exists(build_dir):
        logger.debug('Creating directory %s', build_dir)
        _make_build_dir(build_dir)
    return os.path.join(build_dir, name)
|
|
|
|
|
2014-12-21 14:17:37 +01:00
|
|
|
def _correct_build_location(self):
    # type: () -> None
    """Move self._temp_build_dir to self._ideal_build_dir/self.req.name

    For some requirements (e.g. a path to a directory), the name of the
    package is not available until we run egg_info, so the build_location
    will return a temporary directory and store the _ideal_build_dir.

    This is only called by self.run_egg_info to fix the temporary build
    directory.
    """
    # A real source dir means the location is already correct.
    if self.source_dir is not None:
        return
    assert self.req is not None
    assert self._temp_build_dir.path
    assert (self._ideal_build_dir is not None and
            self._ideal_build_dir.path)  # type: ignore
    old_location = self._temp_build_dir.path
    # Clear the path first so build_location() computes a fresh, named
    # location instead of returning the old temp dir.
    self._temp_build_dir.path = None

    new_location = self.build_location(self._ideal_build_dir)
    if os.path.exists(new_location):
        raise InstallationError(
            'A package already exists in %s; please remove it to continue'
            % display_path(new_location))
    logger.debug(
        'Moving package %s from %s to new location %s',
        self, display_path(old_location), display_path(new_location),
    )
    shutil.move(old_location, new_location)
    self._temp_build_dir.path = new_location
    self._ideal_build_dir = None
    self.source_dir = os.path.normpath(os.path.abspath(new_location))
    # The cached egg-info path referred to the old location; recompute.
    self._egg_info_path = None

    # Correct the metadata directory, if it exists
    if self.metadata_directory:
        old_meta = self.metadata_directory
        # Recompute the metadata path relative to the moved tree.
        rel = os.path.relpath(old_meta, start=old_location)
        new_meta = os.path.join(new_location, rel)
        new_meta = os.path.normpath(os.path.abspath(new_meta))
        self.metadata_directory = new_meta
|
|
|
|
|
2018-05-29 22:37:19 +02:00
|
|
|
def remove_temporary_source(self):
|
2018-12-17 12:13:00 +01:00
|
|
|
# type: () -> None
|
2018-05-29 22:37:19 +02:00
|
|
|
"""Remove the source files from this requirement, if they are marked
|
|
|
|
for deletion"""
|
|
|
|
if self.source_dir and os.path.exists(
|
|
|
|
os.path.join(self.source_dir, PIP_DELETE_MARKER_FILENAME)):
|
|
|
|
logger.debug('Removing source in %s', self.source_dir)
|
|
|
|
rmtree(self.source_dir)
|
|
|
|
self.source_dir = None
|
|
|
|
self._temp_build_dir.cleanup()
|
|
|
|
self.build_env.cleanup()
|
|
|
|
|
|
|
|
def check_if_exists(self, use_user_site):
    # type: (bool) -> bool
    """Find an installed distribution that satisfies or conflicts
    with this requirement, and set self.satisfied_by or
    self.conflicts_with appropriately.

    :param use_user_site: whether the install targets the per-user site;
        affects which existing installation counts as a conflict.
    :return: True if some matching distribution is installed (satisfying
        or conflicting), False otherwise.
    :raises InstallationError: when a user-site install would be shadowed
        by a conflicting dist in the virtualenv's site-packages.
    """
    if self.req is None:
        return False
    try:
        # get_distribution() will resolve the entire list of requirements
        # anyway, and we've already determined that we need the requirement
        # in question, so strip the marker so that we don't try to
        # evaluate it.
        no_marker = Requirement(str(self.req))
        no_marker.marker = None
        self.satisfied_by = pkg_resources.get_distribution(str(no_marker))
        if self.editable and self.satisfied_by:
            self.conflicts_with = self.satisfied_by
            # when installing editables, nothing pre-existing should ever
            # satisfy
            self.satisfied_by = None
            return True
    except pkg_resources.DistributionNotFound:
        return False
    except pkg_resources.VersionConflict:
        # Something with the same name but a non-matching version is
        # installed; look it up by name alone to report the conflict.
        existing_dist = pkg_resources.get_distribution(
            self.req.name
        )
        if use_user_site:
            if dist_in_usersite(existing_dist):
                self.conflicts_with = existing_dist
            elif (running_under_virtualenv() and
                    dist_in_site_packages(existing_dist)):
                raise InstallationError(
                    "Will not install to the user site because it will "
                    "lack sys.path precedence to %s in %s" %
                    (existing_dist.project_name, existing_dist.location)
                )
        else:
            self.conflicts_with = existing_dist
    return True
|
2018-05-29 22:41:42 +02:00
|
|
|
|
2018-05-29 22:37:19 +02:00
|
|
|
# Things valid for wheels
|
|
|
|
@property
def is_wheel(self):
    # type: () -> bool
    """Whether this requirement's link points at a wheel archive."""
    return self.link.is_wheel if self.link else False
|
|
|
|
|
|
|
|
def move_wheel_files(
    self,
    wheeldir,  # type: str
    root=None,  # type: Optional[str]
    home=None,  # type: Optional[str]
    prefix=None,  # type: Optional[str]
    warn_script_location=True,  # type: bool
    use_user_site=False,  # type: bool
    pycompile=True  # type: bool
):
    # type: (...) -> None
    """Install this requirement's unpacked wheel contents from
    *wheeldir*, delegating to the module-level ``move_wheel_files``
    helper with this requirement's name/req and isolation flag.
    """
    options = dict(
        user=use_user_site,
        home=home,
        root=root,
        prefix=prefix,
        pycompile=pycompile,
        isolated=self.isolated,
        warn_script_location=warn_script_location,
    )
    move_wheel_files(self.name, self.req, wheeldir, **options)
|
2014-01-12 01:50:11 +01:00
|
|
|
|
2018-05-29 22:37:19 +02:00
|
|
|
# Things valid for sdists
|
2016-02-08 14:04:10 +01:00
|
|
|
@property
def setup_py_dir(self):
    # type: () -> str
    """Directory holding the project's setup.py, honouring any
    ``#subdirectory=`` fragment on the requirement's link."""
    subdir = ''
    if self.link and self.link.subdirectory_fragment:
        subdir = self.link.subdirectory_fragment
    return os.path.join(self.source_dir, subdir)
|
|
|
|
|
2014-01-12 01:50:11 +01:00
|
|
|
@property
def setup_py(self):
    # type: () -> str
    """Path to the project's setup.py inside the unpacked source."""
    assert self.source_dir, "No source dir for %s" % self

    path = os.path.join(self.setup_py_dir, 'setup.py')

    # Python2 __file__ should not be unicode
    if six.PY2 and isinstance(path, six.text_type):
        path = path.encode(sys.getfilesystemencoding())

    return path
|
|
|
|
|
2016-11-25 15:14:03 +01:00
|
|
|
@property
def pyproject_toml(self):
    # type: () -> str
    """Path where this project's pyproject.toml would live (it may or
    may not actually exist on disk)."""
    assert self.source_dir, "No source dir for %s" % self

    return make_pyproject_path(self.setup_py_dir)
|
2016-11-25 15:14:03 +01:00
|
|
|
|
2018-07-25 23:34:13 +02:00
|
|
|
def load_pyproject_toml(self):
    # type: () -> None
    """Load the pyproject.toml file.

    After calling this routine, all of the attributes related to PEP 517
    processing for this requirement have been set. In particular, the
    use_pep517 attribute can be used to determine whether we should
    follow the PEP 517 or legacy (setup.py) code path.
    """
    # NOTE: this calls the module-level load_pyproject_toml() helper,
    # which shadows this method's name at module scope.
    requires, pep517_data = load_pyproject_toml(
        self.use_pep517,
        self.editable,
        self.pyproject_toml,
        self.setup_py,
        str(self)
    )

    # pep517_data is truthy only when the PEP 517 path applies.
    use_pep517 = bool(pep517_data)

    self.use_pep517 = use_pep517
    self.pyproject_requires = requires

    if use_pep517:
        backend, check = pep517_data
        self.requirements_to_check = check
        self.pep517_backend = Pep517HookCaller(self.setup_py_dir, backend)

        # Use a custom function to call subprocesses
        self.spin_message = ""

        def runner(
            cmd,  # type: List[str]
            cwd=None,  # type: Optional[str]
            extra_environ=None  # type: Optional[Mapping[str, Any]]
        ):
            # type: (...) -> None
            # Runs backend hook subprocesses under a spinner whose text is
            # taken from self.spin_message at call time (set by callers
            # such as prepare_pep517_metadata), then resets the message.
            with open_spinner(self.spin_message) as spinner:
                call_subprocess(
                    cmd,
                    cwd=cwd,
                    extra_environ=extra_environ,
                    spinner=spinner
                )
            self.spin_message = ""

        self.pep517_backend._subprocess_runner = runner
|
|
|
|
|
2018-08-20 14:37:36 +02:00
|
|
|
def prepare_metadata(self):
    # type: () -> None
    """Ensure that project metadata is available.

    Under PEP 517, call the backend hook to prepare the metadata.
    Under legacy processing, call setup.py egg-info.
    """
    assert self.source_dir

    with indent_log():
        if self.use_pep517:
            self.prepare_pep517_metadata()
        else:
            self.run_egg_info()

    if not self.req:
        # No requirement was given up front (e.g. a bare URL/path), so
        # pin one from the generated metadata.  PEP 440-compliant
        # versions pin with '=='; otherwise use arbitrary-equality '==='.
        if isinstance(parse_version(self.metadata["Version"]), Version):
            op = "=="
        else:
            op = "==="
        self.req = Requirement(
            "".join([
                self.metadata["Name"],
                op,
                self.metadata["Version"],
            ])
        )
        self._correct_build_location()
    else:
        # Warn (and trust the metadata) when the declared name and the
        # generated metadata name disagree after canonicalization.
        metadata_name = canonicalize_name(self.metadata["Name"])
        if canonicalize_name(self.req.name) != metadata_name:
            logger.warning(
                'Generating metadata for package %s '
                'produced metadata for project name %s. Fix your '
                '#egg=%s fragments.',
                self.name, metadata_name, self.name
            )
            self.req = Requirement(metadata_name)
|
2014-01-12 01:50:11 +01:00
|
|
|
|
2018-08-20 14:37:36 +02:00
|
|
|
def prepare_pep517_metadata(self):
    # type: () -> None
    """Generate wheel metadata via the PEP 517 backend hook and record
    the resulting .dist-info location in self.metadata_directory."""
    assert self.pep517_backend is not None

    target = os.path.join(
        self.setup_py_dir,
        'pip-wheel-metadata'
    )
    ensure_dir(target)

    with self.build_env:
        # Note that Pep517HookCaller implements a fallback for
        # prepare_metadata_for_build_wheel, so we don't have to
        # consider the possibility that this hook doesn't exist.
        self.spin_message = "Preparing wheel metadata"
        distinfo = self.pep517_backend.prepare_metadata_for_build_wheel(
            target
        )

    self.metadata_directory = os.path.join(target, distinfo)
|
|
|
|
|
|
|
|
def run_egg_info(self):
    # type: () -> None
    """Generate egg-info metadata by running ``setup.py egg_info``
    (the legacy, non-PEP-517 code path)."""
    if self.name:
        fmt = 'Running setup.py (path:%s) egg_info for package %s'
        subject = self.name
    else:
        fmt = 'Running setup.py (path:%s) egg_info for package from %s'
        subject = self.link
    logger.debug(fmt, self.setup_py, subject)

    cmd = [sys.executable, '-c', SETUPTOOLS_SHIM % self.setup_py]
    if self.isolated:
        cmd += ["--no-user-cfg"]
    cmd += ['egg_info']

    # We can't put the .egg-info files at the root, because then the
    # source code will be mistaken for an installed egg, causing
    # problems
    if self.editable:
        egg_base_option = []  # type: List[str]
    else:
        ensure_dir(os.path.join(self.setup_py_dir, 'pip-egg-info'))
        egg_base_option = ['--egg-base', 'pip-egg-info']

    with self.build_env:
        call_subprocess(
            cmd + egg_base_option,
            cwd=self.setup_py_dir,
            command_desc='python setup.py egg_info')
|
|
|
|
|
2018-07-31 21:59:34 +02:00
|
|
|
@property
def egg_info_path(self):
    # type: () -> str
    """Locate the .egg-info directory for this requirement (cached).

    For editable installs the whole source tree is walked, pruning VCS
    metadata, anything that looks like a virtualenv, and test dirs;
    otherwise only the 'pip-egg-info' dir created by run_egg_info is
    searched.  Raises InstallationError when nothing is found.
    """
    if self._egg_info_path is None:
        if self.editable:
            base = self.source_dir
        else:
            base = os.path.join(self.setup_py_dir, 'pip-egg-info')
        filenames = os.listdir(base)
        if self.editable:
            filenames = []
            for root, dirs, files in os.walk(base):
                # Prune VCS metadata directories in place so os.walk
                # doesn't descend into them.
                for dir in vcs.dirnames:
                    if dir in dirs:
                        dirs.remove(dir)
                # Iterate over a copy of ``dirs``, since mutating
                # a list while iterating over it can cause trouble.
                # (See https://github.com/pypa/pip/pull/462.)
                for dir in list(dirs):
                    # Don't search in anything that looks like a virtualenv
                    # environment
                    if (
                        os.path.lexists(
                            os.path.join(root, dir, 'bin', 'python')
                        ) or
                        os.path.exists(
                            os.path.join(
                                root, dir, 'Scripts', 'Python.exe'
                            )
                        )):
                        dirs.remove(dir)
                    # Also don't search through tests
                    elif dir == 'test' or dir == 'tests':
                        dirs.remove(dir)
                filenames.extend([os.path.join(root, dir)
                                  for dir in dirs])
        filenames = [f for f in filenames if f.endswith('.egg-info')]

        if not filenames:
            raise InstallationError(
                "Files/directories not found in %s" % base
            )
        # if we have more than one match, we pick the toplevel one. This
        # can easily be the case if there is a dist folder which contains
        # an extracted tarball for testing purposes.
        if len(filenames) > 1:
            # Fewest path separators == shallowest == toplevel match.
            filenames.sort(
                key=lambda x: x.count(os.path.sep) +
                (os.path.altsep and x.count(os.path.altsep) or 0)
            )
        self._egg_info_path = os.path.join(base, filenames[0])
    return self._egg_info_path
|
2014-01-12 01:50:11 +01:00
|
|
|
|
2018-07-31 21:59:34 +02:00
|
|
|
@property
def metadata(self):
    # type: () -> Any
    """Package metadata for this requirement, computed once and then
    cached on the instance as ``_metadata``."""
    try:
        return self._metadata
    except AttributeError:
        self._metadata = get_metadata(self.get_dist())
        return self._metadata
|
2014-01-12 01:50:11 +01:00
|
|
|
|
2018-05-29 22:37:19 +02:00
|
|
|
def get_dist(self):
    # type: () -> Distribution
    """Return a pkg_resources.Distribution for this requirement"""
    if self.metadata_directory:
        # PEP 517 path: metadata was prepared into a *.dist-info dir
        # by prepare_pep517_metadata().
        base_dir, distinfo = os.path.split(self.metadata_directory)
        metadata = pkg_resources.PathMetadata(
            base_dir, self.metadata_directory
        )
        dist_name = os.path.splitext(distinfo)[0]
        typ = pkg_resources.DistInfoDistribution
    else:
        # Legacy path: use the .egg-info produced by setup.py egg_info.
        egg_info = self.egg_info_path.rstrip(os.path.sep)
        base_dir = os.path.dirname(egg_info)
        metadata = pkg_resources.PathMetadata(base_dir, egg_info)
        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
        # https://github.com/python/mypy/issues/1174
        typ = pkg_resources.Distribution  # type: ignore

    return typ(
        base_dir,
        project_name=dist_name,
        metadata=metadata,
    )
|
2014-01-12 01:50:11 +01:00
|
|
|
|
|
|
|
def assert_source_matches_version(self):
    # type: () -> None
    """Log whether the unpacked source's metadata version satisfies
    this requirement's specifier (warning when it does not)."""
    assert self.source_dir
    version = self.metadata['version']
    satisfied = not self.req.specifier or version in self.req.specifier
    if satisfied:
        logger.debug(
            'Source in %s has version %s, which satisfies requirement %s',
            display_path(self.source_dir),
            version,
            self,
        )
    else:
        logger.warning(
            'Requested %s, but installing version %s',
            self,
            version,
        )
|
2014-01-12 01:50:11 +01:00
|
|
|
|
2018-05-29 22:37:19 +02:00
|
|
|
# For both source distributions and editables
|
|
|
|
def ensure_has_source_dir(self, parent_dir):
    # type: (str) -> str
    """Ensure that a source_dir is set.

    This will create a temporary build dir if the name of the requirement
    isn't known yet.

    :param parent_dir: The ideal pip parent_dir for the source_dir.
        Generally src_dir for editables and build_dir for sdists.
    :return: self.source_dir
    """
    if self.source_dir is not None:
        return self.source_dir
    self.source_dir = self.build_location(parent_dir)
    return self.source_dir
|
|
|
|
|
|
|
|
# For editable installations
|
2018-12-17 12:13:00 +01:00
|
|
|
def install_editable(
    self,
    install_options,  # type: List[str]
    global_options=(),  # type: Sequence[str]
    prefix=None  # type: Optional[str]
):
    # type: (...) -> None
    """Install this requirement in development ("editable") mode by
    running ``setup.py develop --no-deps`` in its source dir."""
    logger.info('Running setup.py develop for %s', self.name)

    global_opts = list(global_options)
    if self.isolated:
        global_opts.append("--no-user-cfg")

    install_opts = list(install_options)
    if prefix:
        install_opts.append('--prefix={}'.format(prefix))

    command = [
        sys.executable,
        '-c',
        SETUPTOOLS_SHIM % self.setup_py,
    ]
    command.extend(global_opts)
    command.extend(['develop', '--no-deps'])
    command.extend(install_opts)

    with indent_log():
        # FIXME: should we do --install-headers here too?
        with self.build_env:
            call_subprocess(
                command,
                cwd=self.setup_py_dir,
            )

    self.install_succeeded = True
|
|
|
|
|
2014-01-12 01:50:11 +01:00
|
|
|
def update_editable(self, obtain=True):
    # type: (bool) -> None
    """Update an editable VCS checkout in place.

    With ``obtain=True`` the VCS backend updates/obtains the checkout;
    with ``obtain=False`` it exports a copy instead.  Local file paths
    and unknown repository locations are left untouched.
    """
    if not self.link:
        logger.debug(
            "Cannot update repository at %s; repository location is "
            "unknown",
            self.source_dir,
        )
        return
    assert self.editable
    assert self.source_dir
    if self.link.scheme == 'file':
        # Static paths don't get updated
        return
    # Editable VCS URLs take the form "<vcs>+<real-url>".
    assert '+' in self.link.url, "bad url: %r" % self.link.url
    if not self.update:
        return
    vc_type, url = self.link.url.split('+', 1)
    backend = vcs.get_backend(vc_type)
    if backend:
        vcs_backend = backend(self.link.url)
        if obtain:
            vcs_backend.obtain(self.source_dir)
        else:
            vcs_backend.export(self.source_dir)
    else:
        # NOTE(review): assert is stripped under -O; raising an
        # exception would be more robust here — confirm before changing.
        assert 0, (
            'Unexpected version control type (in %s): %s'
            % (self.link, vc_type))
|
2014-01-12 01:50:11 +01:00
|
|
|
|
2018-05-29 22:37:19 +02:00
|
|
|
# Top-level Actions
|
2018-01-20 12:28:26 +01:00
|
|
|
def uninstall(self, auto_confirm=False, verbose=False,
              use_user_site=False):
    # type: (bool, bool, bool) -> Optional[UninstallPathSet]
    """
    Uninstall the distribution currently satisfying this requirement.

    Prompts before removing or modifying files unless
    ``auto_confirm`` is True.

    Refuses to delete or modify files outside of ``sys.prefix`` -
    thus uninstallation within a virtual environment can only
    modify that virtual environment, even if the virtualenv is
    linked to global site-packages.

    """
    if not self.check_if_exists(use_user_site):
        logger.warning("Skipping %s as it is not installed.", self.name)
        return None

    dist = self.satisfied_by or self.conflicts_with
    pathset = UninstallPathSet.from_dist(dist)
    pathset.remove(auto_confirm, verbose)
    return pathset
|
2014-01-12 01:50:11 +01:00
|
|
|
|
2018-05-29 22:37:19 +02:00
|
|
|
def _clean_zip_name(self, name, prefix): # only used by archive.
|
2018-12-19 17:39:35 +01:00
|
|
|
# type: (str, str) -> str
|
2018-05-29 22:37:19 +02:00
|
|
|
assert name.startswith(prefix + os.path.sep), (
|
|
|
|
"name %r doesn't start with prefix %r" % (name, prefix)
|
|
|
|
)
|
|
|
|
name = name[len(prefix) + 1:]
|
|
|
|
name = name.replace(os.path.sep, '/')
|
|
|
|
return name
|
|
|
|
|
2018-12-17 12:13:00 +01:00
|
|
|
def _get_archive_name(self, path, parentdir, rootdir):
|
|
|
|
# type: (str, str, str) -> str
|
|
|
|
path = os.path.join(parentdir, path)
|
|
|
|
name = self._clean_zip_name(path, rootdir)
|
|
|
|
return self.name + '/' + name
|
|
|
|
|
2018-05-29 22:37:19 +02:00
|
|
|
# TODO: Investigate if this should be kept in InstallRequirement
|
|
|
|
# Seems to be used only when VCS + downloads
|
2014-01-12 01:50:11 +01:00
|
|
|
def archive(self, build_dir):
    # type: (str) -> None
    """Create a zip archive of this requirement's source tree inside
    *build_dir*, prompting interactively if the target already exists.
    """
    assert self.source_dir
    create_archive = True
    archive_name = '%s-%s.zip' % (self.name, self.metadata["version"])
    archive_path = os.path.join(build_dir, archive_name)
    if os.path.exists(archive_path):
        response = ask_path_exists(
            'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort ' %
            display_path(archive_path), ('i', 'w', 'b', 'a'))
        if response == 'i':
            create_archive = False
        elif response == 'w':
            logger.warning('Deleting %s', display_path(archive_path))
            os.remove(archive_path)
        elif response == 'b':
            dest_file = backup_dir(archive_path)
            logger.warning(
                'Backing up %s to %s',
                display_path(archive_path),
                display_path(dest_file),
            )
            shutil.move(archive_path, dest_file)
        elif response == 'a':
            sys.exit(-1)
    if create_archive:
        zip = zipfile.ZipFile(
            archive_path, 'w', zipfile.ZIP_DEFLATED,
            allowZip64=True
        )
        dir = os.path.normcase(os.path.abspath(self.setup_py_dir))
        for dirpath, dirnames, filenames in os.walk(dir):
            # Never ship pip's own metadata scratch directory.
            if 'pip-egg-info' in dirnames:
                dirnames.remove('pip-egg-info')
            for dirname in dirnames:
                # Write explicit directory entries so empty dirs survive.
                dir_arcname = self._get_archive_name(dirname,
                                                     parentdir=dirpath,
                                                     rootdir=dir)
                zipdir = zipfile.ZipInfo(dir_arcname + '/')
                zipdir.external_attr = 0x1ED << 16  # 0o755
                zip.writestr(zipdir, '')
            for filename in filenames:
                if filename == PIP_DELETE_MARKER_FILENAME:
                    continue
                file_arcname = self._get_archive_name(filename,
                                                      parentdir=dirpath,
                                                      rootdir=dir)
                filename = os.path.join(dirpath, filename)
                zip.write(filename, file_arcname)
        zip.close()
    logger.info('Saved %s', display_path(archive_path))
|
2014-01-12 01:50:11 +01:00
|
|
|
|
2018-12-17 12:13:00 +01:00
|
|
|
def install(
    self,
    install_options,  # type: List[str]
    global_options=None,  # type: Optional[Sequence[str]]
    root=None,  # type: Optional[str]
    home=None,  # type: Optional[str]
    prefix=None,  # type: Optional[str]
    warn_script_location=True,  # type: bool
    use_user_site=False,  # type: bool
    pycompile=True  # type: bool
):
    # type: (...) -> None
    """Install this requirement, dispatching to the editable, wheel, or
    legacy ``setup.py install`` code path as appropriate, and writing an
    installed-files.txt record for the legacy path.
    """
    global_options = global_options if global_options is not None else []
    if self.editable:
        self.install_editable(
            install_options, global_options, prefix=prefix,
        )
        return
    if self.is_wheel:
        version = wheel.wheel_version(self.source_dir)
        wheel.check_compatibility(version, self.name)

        self.move_wheel_files(
            self.source_dir, root=root, prefix=prefix, home=home,
            warn_script_location=warn_script_location,
            use_user_site=use_user_site, pycompile=pycompile,
        )
        self.install_succeeded = True
        return

    # Extend the list of global and install options passed on to
    # the setup.py call with the ones from the requirements file.
    # Options specified in requirements file override those
    # specified on the command line, since the last option given
    # to setup.py is the one that is used.
    global_options = list(global_options) + \
        self.options.get('global_options', [])
    install_options = list(install_options) + \
        self.options.get('install_options', [])

    if self.isolated:
        # https://github.com/python/mypy/issues/1174
        global_options = global_options + ["--no-user-cfg"]  # type: ignore

    with TempDirectory(kind="record") as temp_dir:
        record_filename = os.path.join(temp_dir.path, 'install-record.txt')
        install_args = self.get_install_args(
            global_options, record_filename, root, prefix, pycompile,
        )
        msg = 'Running setup.py install for %s' % (self.name,)
        with open_spinner(msg) as spinner:
            with indent_log():
                with self.build_env:
                    call_subprocess(
                        install_args + install_options,
                        cwd=self.setup_py_dir,
                        spinner=spinner,
                    )

        if not os.path.exists(record_filename):
            logger.debug('Record file %s not found', record_filename)
            return
        self.install_succeeded = True

        def prepend_root(path):
            # type: (str) -> str
            # Re-root absolute paths when installing under --root.
            if root is None or not os.path.isabs(path):
                return path
            else:
                return change_root(root, path)

        # First pass: locate the installed .egg-info directory so the
        # record can be rewritten relative to it.
        with open(record_filename) as f:
            for line in f:
                directory = os.path.dirname(line)
                if directory.endswith('.egg-info'):
                    egg_info_dir = prepend_root(directory)
                    break
            else:
                logger.warning(
                    'Could not find .egg-info directory in install record'
                    ' for %s',
                    self,
                )
                # FIXME: put the record somewhere
                # FIXME: should this be an error?
                return
        # Second pass: normalise each recorded path (directories get a
        # trailing separator) relative to the .egg-info directory.
        new_lines = []
        with open(record_filename) as f:
            for line in f:
                filename = line.strip()
                if os.path.isdir(filename):
                    filename += os.path.sep
                new_lines.append(
                    os.path.relpath(prepend_root(filename), egg_info_dir)
                )
        new_lines.sort()
        ensure_dir(egg_info_dir)
        inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt')
        with open(inst_files_path, 'w') as f:
            f.write('\n'.join(new_lines) + '\n')
|
2014-01-12 01:50:11 +01:00
|
|
|
|
2018-12-17 12:13:00 +01:00
|
|
|
def get_install_args(
    self,
    global_options,  # type: Sequence[str]
    record_filename,  # type: str
    root,  # type: Optional[str]
    prefix,  # type: Optional[str]
    pycompile  # type: bool
):
    # type: (...) -> List[str]
    """Build the argv for a legacy ``setup.py install`` invocation,
    recording installed files into *record_filename*."""
    args = [sys.executable, "-u", '-c', SETUPTOOLS_SHIM % self.setup_py]
    args.extend(global_options)
    args.extend(['install', '--record', record_filename,
                 '--single-version-externally-managed'])

    if root is not None:
        args.extend(['--root', root])
    if prefix is not None:
        args.extend(['--prefix', prefix])

    args.append("--compile" if pycompile else "--no-compile")

    if running_under_virtualenv():
        headers = os.path.join(
            sys.prefix, 'include', 'site',
            'python' + sysconfig.get_python_version(), self.name,
        )
        args.extend(['--install-headers', headers])

    return args
|