# The following comment should be removed at some point in the future.
# mypy: strict-optional=False

from __future__ import absolute_import

import logging
import os
import shutil
import sys
import zipfile

from pip._vendor import pkg_resources, six
from pip._vendor.packaging.requirements import Requirement
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.packaging.version import Version
from pip._vendor.packaging.version import parse as parse_version
from pip._vendor.pep517.wrappers import Pep517HookCaller

from pip._internal import pep425tags
from pip._internal.build_env import NoOpBuildEnvironment
from pip._internal.exceptions import InstallationError
from pip._internal.locations import get_scheme
from pip._internal.models.link import Link
from pip._internal.operations.build.metadata import generate_metadata
from pip._internal.operations.build.metadata_legacy import \
    generate_metadata as generate_metadata_legacy
from pip._internal.operations.install.editable_legacy import \
    install_editable as install_editable_legacy
from pip._internal.operations.install.legacy import install as install_legacy
from pip._internal.operations.install.wheel import install_wheel
from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path
from pip._internal.req.req_uninstall import UninstallPathSet
from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.hashes import Hashes
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import (
    ask_path_exists,
    backup_dir,
    display_path,
    dist_in_site_packages,
    dist_in_usersite,
    get_installed_version,
    hide_url,
    redact_auth_from_url,
)
from pip._internal.utils.packaging import get_metadata
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.virtualenv import running_under_virtualenv
from pip._internal.vcs import vcs

if MYPY_CHECK_RUNNING:
    from typing import (
        Any, Dict, Iterable, List, Optional, Sequence, Union,
    )
    from pip._internal.build_env import BuildEnvironment
    from pip._internal.cache import WheelCache
    from pip._internal.index.package_finder import PackageFinder
    from pip._vendor.pkg_resources import Distribution
    from pip._vendor.packaging.specifiers import SpecifierSet
    from pip._vendor.packaging.markers import Marker


logger = logging.getLogger(__name__)


def _get_dist(metadata_directory):
    # type: (str) -> Distribution
    """Return a pkg_resources.Distribution for the provided
    metadata directory.
    """
    dist_dir = metadata_directory.rstrip(os.sep)

    # Determine the correct Distribution object type.
    if dist_dir.endswith(".egg-info"):
        dist_cls = pkg_resources.Distribution
    else:
        assert dist_dir.endswith(".dist-info")
        dist_cls = pkg_resources.DistInfoDistribution

    # Build a PathMetadata object, from path to metadata. :wink:
    base_dir, dist_dir_name = os.path.split(dist_dir)
    dist_name = os.path.splitext(dist_dir_name)[0]
    metadata = pkg_resources.PathMetadata(base_dir, dist_dir)

    return dist_cls(
        base_dir,
        project_name=dist_name,
        metadata=metadata,
    )

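# A minimal sketch (hypothetical path) of how _get_dist() is used: a
# ".dist-info" metadata directory yields a DistInfoDistribution, while an
# ".egg-info" directory yields a plain Distribution, both backed by a
# PathMetadata pointing at that directory.
#
#     dist = _get_dist("/tmp/pip-build/example-1.0.dist-info")
#     dist.has_metadata("METADATA")  # reads from the directory above

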
class InstallRequirement(object):
    """
    Represents something that may be installed later on, may have information
    about where to fetch the relevant requirement and also contains logic for
    installing the said requirement.
    """

    def __init__(
        self,
        req,  # type: Optional[Requirement]
        comes_from,  # type: Optional[Union[str, InstallRequirement]]
        source_dir=None,  # type: Optional[str]
        editable=False,  # type: bool
        link=None,  # type: Optional[Link]
        markers=None,  # type: Optional[Marker]
        use_pep517=None,  # type: Optional[bool]
        isolated=False,  # type: bool
        install_options=None,  # type: Optional[List[str]]
        global_options=None,  # type: Optional[List[str]]
        hash_options=None,  # type: Optional[Dict[str, List[str]]]
        wheel_cache=None,  # type: Optional[WheelCache]
        constraint=False,  # type: bool
        extras=()  # type: Iterable[str]
    ):
        # type: (...) -> None
        assert req is None or isinstance(req, Requirement), req
        self.req = req
        self.comes_from = comes_from
        self.constraint = constraint
        if source_dir is None:
            self.source_dir = None  # type: Optional[str]
        else:
            self.source_dir = os.path.normpath(os.path.abspath(source_dir))
        self.editable = editable

        self._wheel_cache = wheel_cache
        if link is None and req and req.url:
            # PEP 508 URL requirement
            link = Link(req.url)
        self.link = self.original_link = link
        # Path to any downloaded or already-existing package.
        self.local_file_path = None  # type: Optional[str]
        if self.link and self.link.is_file:
            self.local_file_path = self.link.file_path

        if extras:
            self.extras = extras
        elif req:
            self.extras = {
                pkg_resources.safe_extra(extra) for extra in req.extras
            }
        else:
            self.extras = set()
        if markers is None and req:
            markers = req.marker
        self.markers = markers

        # This holds the pkg_resources.Distribution object if this requirement
        # is already available:
        self.satisfied_by = None  # type: Optional[Distribution]
        # Whether the installation process should try to uninstall an existing
        # distribution before installing this requirement.
        self.should_reinstall = False
        # Temporary build location
        self._temp_build_dir = None  # type: Optional[TempDirectory]
        # Set to True after successful installation
        self.install_succeeded = None  # type: Optional[bool]
        # Supplied options
        self.install_options = install_options if install_options else []
        self.global_options = global_options if global_options else []
        self.hash_options = hash_options if hash_options else {}
        # Set to True after successful preparation of this requirement
        self.prepared = False
        self.is_direct = False

        self.isolated = isolated
        self.build_env = NoOpBuildEnvironment()  # type: BuildEnvironment

        # For PEP 517, the directory where we request the project metadata
        # gets stored. We need this to pass to build_wheel, so the backend
        # can ensure that the wheel matches the metadata (see the PEP for
        # details).
        self.metadata_directory = None  # type: Optional[str]

        # The static build requirements (from pyproject.toml)
        self.pyproject_requires = None  # type: Optional[List[str]]

        # Build requirements that we will check are available
        self.requirements_to_check = []  # type: List[str]

        # The PEP 517 backend we should use to build the project
        self.pep517_backend = None  # type: Optional[Pep517HookCaller]

        # Are we using PEP 517 for this requirement?
        # After pyproject.toml has been loaded, the only valid values are True
        # and False. Before loading, None is valid (meaning "use the default").
        # Setting an explicit value before loading pyproject.toml is supported,
        # but after loading this flag should be treated as read only.
        self.use_pep517 = use_pep517

    def __str__(self):
        # type: () -> str
        if self.req:
            s = str(self.req)
            if self.link:
                s += ' from {}'.format(redact_auth_from_url(self.link.url))
        elif self.link:
            s = redact_auth_from_url(self.link.url)
        else:
            s = '<InstallRequirement>'
        if self.satisfied_by is not None:
            s += ' in {}'.format(display_path(self.satisfied_by.location))
        if self.comes_from:
            if isinstance(self.comes_from, six.string_types):
                comes_from = self.comes_from  # type: Optional[str]
            else:
                comes_from = self.comes_from.from_path()
            if comes_from:
                s += ' (from {})'.format(comes_from)
        return s

    def __repr__(self):
        # type: () -> str
        return '<{} object: {} editable={!r}>'.format(
            self.__class__.__name__, str(self), self.editable)

    def format_debug(self):
        # type: () -> str
        """An un-tested helper for getting state, for debugging.
        """
        attributes = vars(self)
        names = sorted(attributes)

        state = (
            "{}={!r}".format(attr, attributes[attr]) for attr in sorted(names)
        )
        return '<{name} object: {{{state}}}>'.format(
            name=self.__class__.__name__,
            state=", ".join(state),
        )

    def populate_link(self, finder, upgrade, require_hashes):
        # type: (PackageFinder, bool, bool) -> None
        """Ensure that if a link can be found for this, that it is found.

        Note that self.link may still be None - if upgrade is False and the
        requirement is already installed.

        If require_hashes is True, don't use the wheel cache, because cached
        wheels, always built locally, have different hashes than the files
        downloaded from the index server and thus throw false hash mismatches.
        Furthermore, cached wheels at present have nondeterministic contents
        due to file modification times.
        """
        if self.link is None:
            self.link = finder.find_requirement(self, upgrade)
        if self._wheel_cache is not None and not require_hashes:
            old_link = self.link
            supported_tags = pep425tags.get_supported()
            self.link = self._wheel_cache.get(
                link=self.link,
                package_name=self.name,
                supported_tags=supported_tags,
            )
            if old_link != self.link:
                logger.debug('Using cached wheel link: %s', self.link)

    # Things that are valid for all kinds of requirements?
    @property
    def name(self):
        # type: () -> Optional[str]
        if self.req is None:
            return None
        return six.ensure_str(pkg_resources.safe_name(self.req.name))

    @property
    def specifier(self):
        # type: () -> SpecifierSet
        return self.req.specifier

    @property
    def is_pinned(self):
        # type: () -> bool
        """Return whether I am pinned to an exact version.

        For example, some-package==1.2 is pinned; some-package>1.2 is not.
        """
        specifiers = self.specifier
        return (len(specifiers) == 1 and
                next(iter(specifiers)).operator in {'==', '==='})

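    # A rough illustration (hypothetical requirement strings) of what
    # is_pinned reports, assuming packaging's Requirement parser:
    #
    #     InstallRequirement(Requirement("example==1.2"), None).is_pinned   # True
    #     InstallRequirement(Requirement("example===1.2"), None).is_pinned  # True
    #     InstallRequirement(Requirement("example>=1.2"), None).is_pinned   # False
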
    @property
    def installed_version(self):
        # type: () -> Optional[str]
        return get_installed_version(self.name)

    def match_markers(self, extras_requested=None):
        # type: (Optional[Iterable[str]]) -> bool
        if not extras_requested:
            # Provide an extra to safely evaluate the markers
            # without matching any extra
            extras_requested = ('',)
        if self.markers is not None:
            return any(
                self.markers.evaluate({'extra': extra})
                for extra in extras_requested)
        else:
            return True

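    # A small sketch (hypothetical marker) of how match_markers() evaluates
    # environment markers against the requested extras:
    #
    #     req = InstallRequirement(Requirement('example; extra == "test"'), None)
    #     req.match_markers()                           # False, no extra requested
    #     req.match_markers(extras_requested=["test"])  # True
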
    @property
    def has_hash_options(self):
        # type: () -> bool
        """Return whether any known-good hashes are specified as options.

        These activate --require-hashes mode; hashes specified as part of a
        URL do not.

        """
        return bool(self.hash_options)

    def hashes(self, trust_internet=True):
        # type: (bool) -> Hashes
        """Return a hash-comparer that considers my option- and URL-based
        hashes to be known-good.

        Hashes in URLs--ones embedded in the requirements file, not ones
        downloaded from an index server--are almost peers with ones from
        flags. They satisfy --require-hashes (whether it was implicitly or
        explicitly activated) but do not activate it. md5 and sha224 are not
        allowed in flags, which should nudge people toward good algos. We
        always OR all hashes together, even ones from URLs.

        :param trust_internet: Whether to trust URL-based (#md5=...) hashes
            downloaded from the internet, as by populate_link()

        """
        good_hashes = self.hash_options.copy()
        link = self.link if trust_internet else self.original_link
        if link and link.hash:
            good_hashes.setdefault(link.hash_name, []).append(link.hash)
        return Hashes(good_hashes)

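    # A brief sketch (hypothetical digests) of how option- and URL-based
    # hashes are merged into one comparer: with
    # hash_options == {'sha256': ['abc...']} and a link URL ending in
    # '#sha256=def...', hashes() returns
    # Hashes({'sha256': ['abc...', 'def...']}).
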
    def from_path(self):
        # type: () -> Optional[str]
        """Format a nice indicator to show where this "comes from"
        """
        if self.req is None:
            return None
        s = str(self.req)
        if self.comes_from:
            if isinstance(self.comes_from, six.string_types):
                comes_from = self.comes_from
            else:
                comes_from = self.comes_from.from_path()
            if comes_from:
                s += '->' + comes_from
        return s

    def ensure_build_location(self, build_dir, autodelete):
        # type: (str, bool) -> str
        assert build_dir is not None
        if self._temp_build_dir is not None:
            assert self._temp_build_dir.path
            return self._temp_build_dir.path
        if self.req is None:
            # Some systems have /tmp as a symlink which confuses custom
            # builds (such as numpy). Thus, we ensure that the real path
            # is returned.
            self._temp_build_dir = TempDirectory(
                kind=tempdir_kinds.REQ_BUILD, globally_managed=True
            )

            return self._temp_build_dir.path
        if self.editable:
            name = self.name.lower()
        else:
            name = self.name
        # FIXME: Is there a better place to create the build_dir? (hg and bzr
        # need this)
        if not os.path.exists(build_dir):
            logger.debug('Creating directory %s', build_dir)
            os.makedirs(build_dir)
        actual_build_dir = os.path.join(build_dir, name)
        # `None` indicates that we respect the globally-configured deletion
        # settings, which is what we actually want when auto-deleting.
        delete_arg = None if autodelete else False
        return TempDirectory(
            path=actual_build_dir,
            delete=delete_arg,
            kind=tempdir_kinds.REQ_BUILD,
            globally_managed=True,
        ).path

    def _set_requirement(self):
        # type: () -> None
        """Set requirement after generating metadata.
        """
        assert self.req is None
        assert self.metadata is not None
        assert self.source_dir is not None

        # Construct a Requirement object from the generated metadata
        if isinstance(parse_version(self.metadata["Version"]), Version):
            op = "=="
        else:
            op = "==="

        self.req = Requirement(
            "".join([
                self.metadata["Name"],
                op,
                self.metadata["Version"],
            ])
        )

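    # For example (hypothetical metadata): a PEP 440 version such as "1.0"
    # parses to a Version, so the requirement becomes "example==1.0"; a
    # version string that does not parse as PEP 440 falls back to the
    # arbitrary-equality operator, i.e. "example===<that version>".
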
    def warn_on_mismatching_name(self):
        # type: () -> None
        metadata_name = canonicalize_name(self.metadata["Name"])
        if canonicalize_name(self.req.name) == metadata_name:
            # Everything is fine.
            return

        # If we're here, there's a mismatch. Log a warning about it.
        logger.warning(
            'Generating metadata for package %s '
            'produced metadata for project name %s. Fix your '
            '#egg=%s fragments.',
            self.name, metadata_name, self.name
        )
        self.req = Requirement(metadata_name)

    def check_if_exists(self, use_user_site):
        # type: (bool) -> None
        """Find an installed distribution that satisfies or conflicts
        with this requirement, and set self.satisfied_by or
        self.should_reinstall appropriately.
        """
        if self.req is None:
            return
        # get_distribution() will resolve the entire list of requirements
        # anyway, and we've already determined that we need the requirement
        # in question, so strip the marker so that we don't try to
        # evaluate it.
        no_marker = Requirement(str(self.req))
        no_marker.marker = None
        try:
            self.satisfied_by = pkg_resources.get_distribution(str(no_marker))
        except pkg_resources.DistributionNotFound:
            return
        except pkg_resources.VersionConflict:
            existing_dist = pkg_resources.get_distribution(
                self.req.name
            )
            if use_user_site:
                if dist_in_usersite(existing_dist):
                    self.should_reinstall = True
                elif (running_under_virtualenv() and
                        dist_in_site_packages(existing_dist)):
                    raise InstallationError(
                        "Will not install to the user site because it will "
                        "lack sys.path precedence to {} in {}".format(
                            existing_dist.project_name, existing_dist.location)
                    )
            else:
                self.should_reinstall = True
        else:
            if self.editable and self.satisfied_by:
                self.should_reinstall = True
                # when installing editables, nothing pre-existing should ever
                # satisfy
                self.satisfied_by = None

    # Things valid for wheels
    @property
    def is_wheel(self):
        # type: () -> bool
        if not self.link:
            return False
        return self.link.is_wheel

    # Things valid for sdists
    @property
    def unpacked_source_directory(self):
        # type: () -> str
        return os.path.join(
            self.source_dir,
            self.link and self.link.subdirectory_fragment or '')

    @property
    def setup_py_path(self):
        # type: () -> str
        assert self.source_dir, "No source dir for {}".format(self)
        setup_py = os.path.join(self.unpacked_source_directory, 'setup.py')

        # Python2 __file__ should not be unicode
        if six.PY2 and isinstance(setup_py, six.text_type):
            setup_py = setup_py.encode(sys.getfilesystemencoding())

        return setup_py

    @property
    def pyproject_toml_path(self):
        # type: () -> str
        assert self.source_dir, "No source dir for {}".format(self)
        return make_pyproject_path(self.unpacked_source_directory)

    def load_pyproject_toml(self):
        # type: () -> None
        """Load the pyproject.toml file.

        After calling this routine, all of the attributes related to PEP 517
        processing for this requirement have been set. In particular, the
        use_pep517 attribute can be used to determine whether we should
        follow the PEP 517 or legacy (setup.py) code path.
        """
        pyproject_toml_data = load_pyproject_toml(
            self.use_pep517,
            self.pyproject_toml_path,
            self.setup_py_path,
            str(self)
        )

        if pyproject_toml_data is None:
            self.use_pep517 = False
            return

        self.use_pep517 = True
        requires, backend, check, backend_path = pyproject_toml_data
        self.requirements_to_check = check
        self.pyproject_requires = requires
        self.pep517_backend = Pep517HookCaller(
            self.unpacked_source_directory, backend, backend_path=backend_path,
        )

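    # A condensed sketch of the outcomes above: when load_pyproject_toml()
    # returns None (PEP 517 processing is not requested or not possible),
    # use_pep517 ends up False and the legacy setup.py path is taken;
    # otherwise use_pep517 is True and pep517_backend wraps the declared
    # build backend, e.g. a hypothetical
    # Pep517HookCaller(src_dir, "setuptools.build_meta").
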
    def _generate_metadata(self):
        # type: () -> str
        """Invokes metadata generator functions, with the required arguments.
        """
        if not self.use_pep517:
            assert self.unpacked_source_directory

            return generate_metadata_legacy(
                build_env=self.build_env,
                setup_py_path=self.setup_py_path,
                source_dir=self.unpacked_source_directory,
                editable=self.editable,
                isolated=self.isolated,
                details=self.name or "from {}".format(self.link)
            )

        assert self.pep517_backend is not None

        return generate_metadata(
            build_env=self.build_env,
            backend=self.pep517_backend,
        )

    def prepare_metadata(self):
        # type: () -> None
        """Ensure that project metadata is available.

        Under PEP 517, call the backend hook to prepare the metadata.
        Under legacy processing, call setup.py egg-info.
        """
        assert self.source_dir

        with indent_log():
            self.metadata_directory = self._generate_metadata()

        # Act on the newly generated metadata, based on the name and version.
        if not self.name:
            self._set_requirement()
        else:
            self.warn_on_mismatching_name()

        self.assert_source_matches_version()

    @property
    def metadata(self):
        # type: () -> Any
        if not hasattr(self, '_metadata'):
            self._metadata = get_metadata(self.get_dist())

        return self._metadata

    def get_dist(self):
        # type: () -> Distribution
        return _get_dist(self.metadata_directory)

    def assert_source_matches_version(self):
        # type: () -> None
        assert self.source_dir
        version = self.metadata['version']
        if self.req.specifier and version not in self.req.specifier:
            logger.warning(
                'Requested %s, but installing version %s',
                self,
                version,
            )
        else:
            logger.debug(
                'Source in %s has version %s, which satisfies requirement %s',
                display_path(self.source_dir),
                version,
                self,
            )

    # For both source distributions and editables
    def ensure_has_source_dir(self, parent_dir, autodelete=False):
        # type: (str, bool) -> None
        """Ensure that a source_dir is set.

        This will create a temporary build dir if the name of the requirement
        isn't known yet.

        :param parent_dir: The ideal pip parent_dir for the source_dir.
            Generally src_dir for editables and build_dir for sdists.
        :return: self.source_dir
        """
        if self.source_dir is None:
            self.source_dir = self.ensure_build_location(
                parent_dir, autodelete
            )

    # For editable installations
    def update_editable(self, obtain=True):
        # type: (bool) -> None
        if not self.link:
            logger.debug(
                "Cannot update repository at %s; repository location is "
                "unknown",
                self.source_dir,
            )
            return
        assert self.editable
        assert self.source_dir
        if self.link.scheme == 'file':
            # Static paths don't get updated
            return
        assert '+' in self.link.url, "bad url: %r" % self.link.url
        vc_type, url = self.link.url.split('+', 1)
        vcs_backend = vcs.get_backend(vc_type)
        if vcs_backend:
            if not self.link.is_vcs:
                reason = (
                    "This form of VCS requirement is being deprecated: {}."
                ).format(
                    self.link.url
                )
                replacement = None
                if self.link.url.startswith("git+git@"):
                    replacement = (
                        "git+https://git@example.com/..., "
                        "git+ssh://git@example.com/..., "
                        "or the insecure git+git://git@example.com/..."
                    )
                deprecated(reason, replacement, gone_in="21.0", issue=7554)
            hidden_url = hide_url(self.link.url)
            if obtain:
                vcs_backend.obtain(self.source_dir, url=hidden_url)
            else:
                vcs_backend.export(self.source_dir, url=hidden_url)
        else:
            assert 0, (
                'Unexpected version control type (in {}): {}'.format(
                    self.link, vc_type))

    # Top-level Actions
    def uninstall(self, auto_confirm=False, verbose=False):
        # type: (bool, bool) -> Optional[UninstallPathSet]
        """
        Uninstall the distribution currently satisfying this requirement.

        Prompts before removing or modifying files unless
        ``auto_confirm`` is True.

        Refuses to delete or modify files outside of ``sys.prefix`` -
        thus uninstallation within a virtual environment can only
        modify that virtual environment, even if the virtualenv is
        linked to global site-packages.

        """
        assert self.req
        try:
            dist = pkg_resources.get_distribution(self.req.name)
        except pkg_resources.DistributionNotFound:
            logger.warning("Skipping %s as it is not installed.", self.name)
            return None
        else:
            logger.info('Found existing installation: %s', dist)

        uninstalled_pathset = UninstallPathSet.from_dist(dist)
        uninstalled_pathset.remove(auto_confirm, verbose)
        return uninstalled_pathset

    def _get_archive_name(self, path, parentdir, rootdir):
        # type: (str, str, str) -> str

        def _clean_zip_name(name, prefix):
            # type: (str, str) -> str
            assert name.startswith(prefix + os.path.sep), (
                "name %r doesn't start with prefix %r" % (name, prefix)
            )
            name = name[len(prefix) + 1:]
            name = name.replace(os.path.sep, '/')
            return name

        path = os.path.join(parentdir, path)
        name = _clean_zip_name(path, rootdir)
        return self.name + '/' + name

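    # A worked example (hypothetical POSIX paths, self.name == "example"):
    #
    #     self._get_archive_name("setup.py",
    #                            parentdir="/tmp/src/example",
    #                            rootdir="/tmp/src/example")
    #     # -> "example/setup.py"
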
    def archive(self, build_dir):
        # type: (str) -> None
        """Saves archive to provided build_dir.

        Used for saving downloaded VCS requirements as part of `pip download`.
        """
        assert self.source_dir

        create_archive = True
        archive_name = '{}-{}.zip'.format(self.name, self.metadata["version"])
        archive_path = os.path.join(build_dir, archive_name)

        if os.path.exists(archive_path):
            response = ask_path_exists(
                'The file {} exists. (i)gnore, (w)ipe, '
                '(b)ackup, (a)bort '.format(
                    display_path(archive_path)),
                ('i', 'w', 'b', 'a'))
            if response == 'i':
                create_archive = False
            elif response == 'w':
                logger.warning('Deleting %s', display_path(archive_path))
                os.remove(archive_path)
            elif response == 'b':
                dest_file = backup_dir(archive_path)
                logger.warning(
                    'Backing up %s to %s',
                    display_path(archive_path),
                    display_path(dest_file),
                )
                shutil.move(archive_path, dest_file)
            elif response == 'a':
                sys.exit(-1)

        if not create_archive:
            return

        zip_output = zipfile.ZipFile(
            archive_path, 'w', zipfile.ZIP_DEFLATED, allowZip64=True,
        )
        with zip_output:
            dir = os.path.normcase(
                os.path.abspath(self.unpacked_source_directory)
            )
            for dirpath, dirnames, filenames in os.walk(dir):
                if 'pip-egg-info' in dirnames:
                    dirnames.remove('pip-egg-info')
                for dirname in dirnames:
                    dir_arcname = self._get_archive_name(
                        dirname, parentdir=dirpath, rootdir=dir,
                    )
                    zipdir = zipfile.ZipInfo(dir_arcname + '/')
                    zipdir.external_attr = 0x1ED << 16  # 0o755
                    zip_output.writestr(zipdir, '')
                for filename in filenames:
                    file_arcname = self._get_archive_name(
                        filename, parentdir=dirpath, rootdir=dir,
                    )
                    filename = os.path.join(dirpath, filename)
                    zip_output.write(filename, file_arcname)

        logger.info('Saved %s', display_path(archive_path))

    def install(
        self,
        install_options,  # type: List[str]
        global_options=None,  # type: Optional[Sequence[str]]
        root=None,  # type: Optional[str]
        home=None,  # type: Optional[str]
        prefix=None,  # type: Optional[str]
        warn_script_location=True,  # type: bool
        use_user_site=False,  # type: bool
        pycompile=True  # type: bool
    ):
        # type: (...) -> None
        scheme = get_scheme(
            self.name,
            user=use_user_site,
            home=home,
            root=root,
            isolated=self.isolated,
            prefix=prefix,
        )

        global_options = global_options if global_options is not None else []
        if self.editable:
            install_editable_legacy(
                install_options,
                global_options,
                prefix=prefix,
                home=home,
                use_user_site=use_user_site,
                name=self.name,
                setup_py_path=self.setup_py_path,
                isolated=self.isolated,
                build_env=self.build_env,
                unpacked_source_directory=self.unpacked_source_directory,
            )
            self.install_succeeded = True
            return

        if self.is_wheel:
            assert self.local_file_path
            install_wheel(
                self.name,
                self.local_file_path,
                scheme=scheme,
                req_description=str(self.req),
                pycompile=pycompile,
                warn_script_location=warn_script_location,
            )
            self.install_succeeded = True
            return

        install_legacy(
            self,
            install_options=install_options,
            global_options=global_options,
            root=root,
            home=home,
            prefix=prefix,
            use_user_site=use_user_site,
            pycompile=pycompile,
            scheme=scheme,
        )