2014-08-31 01:52:28 +02:00
|
|
|
from __future__ import absolute_import
|
|
|
|
|
2015-03-25 01:53:10 +01:00
|
|
|
from collections import defaultdict
|
Add checks against requirements-file-dwelling hashes for most kinds of packages. Close #1175.
* Add --require-hashes option. This is handy in deployment scripts to force application authors to hash their requirements. It is also a convenient way to get pip to show computed hashes for a virgin, unhashed requirements file. Eventually, additions to `pip freeze` should fill a superset of this use case.
* In --require-hashes mode, at least one hash is required to match for each requirement.
* Option-based requirements (--sha256=...) turn on --require-hashes mode implicitly.
* Internet-derived URL-based hashes are "necessary but not sufficient": they do not satisfy --require-hashes mode when they match, but they are still used to guard against transmission errors.
* Other URL-based requirements (#md5=...) are treated just like flag-based ones, except they don't turn on --require-hashes.
* Complain informatively, with the most devastating errors first so you don't chase your tail all day only to run up against a brick wall at the end. This also means we don't complain that a hash is missing, only for the user to find, after fixing it, that we have no idea how to even compute a hash for that type of requirement.
* Complain about unpinned requirements when hash-checking mode is on, lest they cause the user surprise later.
* Complain about missing hashes.
* Complain about requirement types we don't know how to hash (like VCS ones and local dirs).
* Have InstallRequirement keep its original Link around (original_link) so we can differentiate between URL hashes from requirements files and ones downloaded from the (untrustworthy) internet.
* Remove test_download_hashes, which is obsolete. Similar coverage is provided in test_utils.TestHashes and the various hash cases in test_req.py.
2015-09-09 19:01:53 +02:00
|
|
|
from itertools import chain
|
2014-08-31 01:52:28 +02:00
|
|
|
import logging
|
2014-01-12 07:50:07 +01:00
|
|
|
import os
|
|
|
|
|
|
|
|
from pip._vendor import pkg_resources
|
2014-09-11 21:28:21 +02:00
|
|
|
from pip._vendor import requests
|
|
|
|
|
2015-09-29 21:31:27 +02:00
|
|
|
from pip.compat import expanduser
|
Add checks against requirements-file-dwelling hashes for most kinds of packages. Close #1175.
* Add --require-hashes option. This is handy in deployment scripts to force application authors to hash their requirements. It is also a convenient way to get pip to show computed hashes for a virgin, unhashed requirements file. Eventually, additions to `pip freeze` should fill a superset of this use case.
* In --require-hashes mode, at least one hash is required to match for each requirement.
* Option-based requirements (--sha256=...) turn on --require-hashes mode implicitly.
* Internet-derived URL-based hashes are "necessary but not sufficient": they do not satisfy --require-hashes mode when they match, but they are still used to guard against transmission errors.
* Other URL-based requirements (#md5=...) are treated just like flag-based ones, except they don't turn on --require-hashes.
* Complain informatively, with the most devastating errors first so you don't chase your tail all day only to run up against a brick wall at the end. This also means we don't complain that a hash is missing, only for the user to find, after fixing it, that we have no idea how to even compute a hash for that type of requirement.
* Complain about unpinned requirements when hash-checking mode is on, lest they cause the user surprise later.
* Complain about missing hashes.
* Complain about requirement types we don't know how to hash (like VCS ones and local dirs).
* Have InstallRequirement keep its original Link around (original_link) so we can differentiate between URL hashes from requirements files and ones downloaded from the (untrustworthy) internet.
* Remove test_download_hashes, which is obsolete. Similar coverage is provided in test_utils.TestHashes and the various hash cases in test_req.py.
2015-09-09 19:01:53 +02:00
|
|
|
from pip.download import (is_file_url, is_dir_url, is_vcs_url, url_to_path,
|
|
|
|
unpack_url)
|
2014-01-12 07:50:07 +01:00
|
|
|
from pip.exceptions import (InstallationError, BestVersionAlreadyInstalled,
|
Add checks against requirements-file-dwelling hashes for most kinds of packages. Close #1175.
* Add --require-hashes option. This is handy in deployment scripts to force application authors to hash their requirements. It is also a convenient way to get pip to show computed hashes for a virgin, unhashed requirements file. Eventually, additions to `pip freeze` should fill a superset of this use case.
* In --require-hashes mode, at least one hash is required to match for each requirement.
* Option-based requirements (--sha256=...) turn on --require-hashes mode implicitly.
* Internet-derived URL-based hashes are "necessary but not sufficient": they do not satisfy --require-hashes mode when they match, but they are still used to guard against transmission errors.
* Other URL-based requirements (#md5=...) are treated just like flag-based ones, except they don't turn on --require-hashes.
* Complain informatively, with the most devastating errors first so you don't chase your tail all day only to run up against a brick wall at the end. This also means we don't complain that a hash is missing, only for the user to find, after fixing it, that we have no idea how to even compute a hash for that type of requirement.
* Complain about unpinned requirements when hash-checking mode is on, lest they cause the user surprise later.
* Complain about missing hashes.
* Complain about requirement types we don't know how to hash (like VCS ones and local dirs).
* Have InstallRequirement keep its original Link around (original_link) so we can differentiate between URL hashes from requirements files and ones downloaded from the (untrustworthy) internet.
* Remove test_download_hashes, which is obsolete. Similar coverage is provided in test_utils.TestHashes and the various hash cases in test_req.py.
2015-09-09 19:01:53 +02:00
|
|
|
DistributionNotFound, PreviousBuildDirError,
|
|
|
|
HashError, HashErrors, HashUnpinned,
|
|
|
|
DirectoryUrlHashUnsupported, VcsHashUnsupported)
|
2014-01-12 19:05:11 +01:00
|
|
|
from pip.req.req_install import InstallRequirement
|
2015-03-27 03:13:03 +01:00
|
|
|
from pip.utils import (
|
|
|
|
display_path, dist_in_usersite, ensure_dir, normalize_path)
|
Add checks against requirements-file-dwelling hashes for most kinds of packages. Close #1175.
* Add --require-hashes option. This is handy in deployment scripts to force application authors to hash their requirements. It is also a convenient way to get pip to show computed hashes for a virgin, unhashed requirements file. Eventually, additions to `pip freeze` should fill a superset of this use case.
* In --require-hashes mode, at least one hash is required to match for each requirement.
* Option-based requirements (--sha256=...) turn on --require-hashes mode implicitly.
* Internet-derived URL-based hashes are "necessary but not sufficient": they do not satisfy --require-hashes mode when they match, but they are still used to guard against transmission errors.
* Other URL-based requirements (#md5=...) are treated just like flag-based ones, except they don't turn on --require-hashes.
* Complain informatively, with the most devastating errors first so you don't chase your tail all day only to run up against a brick wall at the end. This also means we don't complain that a hash is missing, only for the user to find, after fixing it, that we have no idea how to even compute a hash for that type of requirement.
* Complain about unpinned requirements when hash-checking mode is on, lest they cause the user surprise later.
* Complain about missing hashes.
* Complain about requirement types we don't know how to hash (like VCS ones and local dirs).
* Have InstallRequirement keep its original Link around (original_link) so we can differentiate between URL hashes from requirements files and ones downloaded from the (untrustworthy) internet.
* Remove test_download_hashes, which is obsolete. Similar coverage is provided in test_utils.TestHashes and the various hash cases in test_req.py.
2015-09-09 19:01:53 +02:00
|
|
|
from pip.utils.hashes import MissingHashes
|
2014-08-31 01:52:28 +02:00
|
|
|
from pip.utils.logging import indent_log
|
2014-01-12 07:50:07 +01:00
|
|
|
from pip.vcs import vcs
|
2014-01-12 01:50:11 +01:00
|
|
|
|
|
|
|
|
2014-08-31 01:52:28 +02:00
|
|
|
# Module-level logger named after this module, per stdlib logging convention.
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
2014-01-12 01:50:11 +01:00
|
|
|
class Requirements(object):
    """A mapping from requirement name to requirement that remembers
    insertion order.

    Predates guaranteed dict ordering, so a parallel list of keys is kept
    to make iteration order match the order requirements were added in.
    """

    def __init__(self):
        self._keys = []
        self._dict = {}

    def keys(self):
        """Return the requirement names in insertion order."""
        return self._keys

    def values(self):
        """Return the requirements in insertion order."""
        return [self._dict[name] for name in self._keys]

    def __contains__(self, item):
        return item in self._keys

    def __setitem__(self, key, value):
        # A first assignment records the key's position; re-assignment
        # overwrites the value but keeps the original position.
        if key not in self._keys:
            self._keys.append(key)
        self._dict[key] = value

    def __getitem__(self, key):
        return self._dict[key]

    def __repr__(self):
        pairs = ('%r: %r' % (name, self._dict[name]) for name in self._keys)
        return 'Requirements({%s})' % ', '.join(pairs)
|
|
|
|
|
|
|
|
|
2015-03-19 05:01:55 +01:00
|
|
|
class DistAbstraction(object):
    """Abstracts out the wheel vs non-wheel prepare_files logic.

    The requirements for anything installable are as follows:
     - we must be able to determine the requirement name
       (or we can't correctly handle the non-upgrade case).
     - we must be able to generate a list of run-time dependencies
       without installing any additional packages (or we would
       have to either burn time by doing temporary isolated installs
       or alternatively violate pips 'don't start installing unless
       all requirements are available' rule - neither of which are
       desirable).
     - for packages with setup requirements, we must also be able
       to determine their requirements without installing additional
       packages (for the same reason as run-time dependencies)
     - we must be able to create a Distribution object exposing the
       above metadata.
    """

    def __init__(self, req_to_install):
        # The InstallRequirement this abstraction wraps.
        self.req_to_install = req_to_install

    def dist(self, finder):
        """Return a setuptools Dist object."""
        raise NotImplementedError(self.dist)

    def prep_for_dist(self):
        """Ensure that we can get a Dist for this requirement."""
        # Bug fix: previously raised NotImplementedError(self.dist), which
        # named the wrong method in the error message for subclasses that
        # forgot to override prep_for_dist.
        raise NotImplementedError(self.prep_for_dist)
|
|
|
|
|
|
|
|
|
2015-03-23 03:58:34 +01:00
|
|
|
def make_abstract_dist(req_to_install):
    """Factory to make an abstract dist object.

    Preconditions: Either an editable req with a source_dir, or satisfied_by or
    a wheel link, or a non-editable req with a source_dir.

    :return: A concrete DistAbstraction.
    """
    # Editable requirements are always treated as sdists.
    if req_to_install.editable:
        return IsSDist(req_to_install)
    link = req_to_install.link
    if link and link.is_wheel:
        return IsWheel(req_to_install)
    # Everything else (plain links, named requirements) goes the sdist route.
    return IsSDist(req_to_install)
|
2015-03-19 05:01:55 +01:00
|
|
|
|
2015-03-23 03:58:34 +01:00
|
|
|
|
2015-03-19 05:01:55 +01:00
|
|
|
class IsWheel(DistAbstraction):
    """DistAbstraction for requirements whose link points at a wheel."""

    def dist(self, finder):
        # The wheel has already been unpacked into source_dir; expose its
        # metadata via pkg_resources.
        distributions = pkg_resources.find_distributions(
            self.req_to_install.source_dir)
        return list(distributions)[0]

    def prep_for_dist(self):
        # FIXME:https://github.com/pypa/pip/issues/1112
        pass
|
2015-03-19 05:01:55 +01:00
|
|
|
|
|
|
|
|
|
|
|
class IsSDist(DistAbstraction):
    """DistAbstraction for sdist and editable requirements."""

    def dist(self, finder):
        dist = self.req_to_install.get_dist()
        # FIXME: shouldn't be globally added:
        if dist.has_metadata('dependency_links.txt'):
            links = dist.get_metadata_lines('dependency_links.txt')
            finder.add_dependency_links(links)
        return dist

    def prep_for_dist(self):
        # Running egg_info produces the metadata that dist() later reads.
        self.req_to_install.run_egg_info()
        self.req_to_install.assert_source_matches_version()
|
|
|
|
|
|
|
|
|
2015-03-23 03:44:01 +01:00
|
|
|
class Installed(DistAbstraction):
    """DistAbstraction for requirements already satisfied by an installed
    distribution."""

    def dist(self, finder):
        # Nothing to build: the already-installed distribution is the dist.
        return self.req_to_install.satisfied_by

    def prep_for_dist(self):
        # Already installed, so there is nothing to prepare.
        pass
|
|
|
|
|
|
|
|
|
2014-01-12 01:50:11 +01:00
|
|
|
class RequirementSet(object):
|
|
|
|
|
2014-04-24 13:29:57 +02:00
|
|
|
    def __init__(self, build_dir, src_dir, download_dir, upgrade=False,
                 ignore_installed=False, as_egg=False, target_dir=None,
                 ignore_dependencies=False, force_reinstall=False,
                 use_user_site=False, session=None, pycompile=True,
                 isolated=False, wheel_download_dir=None,
                 wheel_cache=None, require_hashes=False):
        """Create a RequirementSet.

        :param wheel_download_dir: Where still-packed .whl files should be
            written to. If None they are written to the download_dir parameter.
            Separate to download_dir to permit only keeping wheel archives for
            pip wheel.
        :param download_dir: Where still packed archives should be written to.
            If None they are not saved, and are deleted immediately after
            unpacking.
        :param wheel_cache: The pip wheel cache, for passing to
            InstallRequirement.
        :raises TypeError: when no ``session`` is given (positional for
            backwards compatibility, but effectively required).
        """
        if session is None:
            raise TypeError(
                "RequirementSet() missing 1 required keyword argument: "
                "'session'"
            )

        self.build_dir = build_dir
        self.src_dir = src_dir
        # XXX: download_dir and wheel_download_dir overlap semantically and may
        # be combined if we're willing to have non-wheel archives present in
        # the wheelhouse output by 'pip wheel'.
        self.download_dir = download_dir
        self.upgrade = upgrade
        self.ignore_installed = ignore_installed
        self.force_reinstall = force_reinstall
        # Ordered name -> InstallRequirement mapping of named requirements.
        self.requirements = Requirements()
        # Mapping of alias: real_name
        self.requirement_aliases = {}
        # Requirements added without a name (URL/path reqs w/o egg fragment).
        self.unnamed_requirements = []
        self.ignore_dependencies = ignore_dependencies
        self.successfully_downloaded = []
        self.successfully_installed = []
        # Requirements whose temp build artifacts must be removed later.
        self.reqs_to_cleanup = []
        self.as_egg = as_egg
        self.use_user_site = use_user_site
        self.target_dir = target_dir  # set from --target option
        self.session = session
        self.pycompile = pycompile
        self.isolated = isolated
        # Normalize up front so later path comparisons are consistent.
        if wheel_download_dir:
            wheel_download_dir = normalize_path(wheel_download_dir)
        self.wheel_download_dir = wheel_download_dir
        self._wheel_cache = wheel_cache
        # When True, every requirement must carry a matching hash
        # (see prepare_files, which may also turn this on implicitly).
        self.require_hashes = require_hashes
        # Maps from install_req -> dependencies_of_install_req
        self._dependencies = defaultdict(list)
|
2014-01-12 01:50:11 +01:00
|
|
|
|
|
|
|
def __str__(self):
|
|
|
|
reqs = [req for req in self.requirements.values()
|
|
|
|
if not req.comes_from]
|
|
|
|
reqs.sort(key=lambda req: req.name.lower())
|
|
|
|
return ' '.join([str(req.req) for req in reqs])
|
|
|
|
|
Add RequirementSet.__repr__
This is useful when exploring with pdb -- e.g.:
(Pdb++) requirement_set
<RequirementSet object; 17 requirement(s): coverage==3.7.1, dj-database-url==0.2.2, dj-static==0.0.5, Django==1.6.2, django-nose==1.2, django-toolbelt==0.0.1, flake8==2.1.0, gunicorn==18.0, mccabe==0.2.1, nose==1.3.0, pep8==1.4.6, psycopg2==2.5.2, pyflakes==0.7.3, selenium==2.39.0, South==0.8.4, static==0.4, wsgiref==0.1.2>
2015-03-20 18:55:36 +01:00
|
|
|
def __repr__(self):
|
|
|
|
reqs = [req for req in self.requirements.values()]
|
|
|
|
reqs.sort(key=lambda req: req.name.lower())
|
|
|
|
reqs_str = ', '.join([str(req.req) for req in reqs])
|
|
|
|
return ('<%s object; %d requirement(s): %s>'
|
|
|
|
% (self.__class__.__name__, len(reqs), reqs_str))
|
|
|
|
|
2015-03-25 01:53:10 +01:00
|
|
|
    def add_requirement(self, install_req, parent_req_name=None):
        """Add install_req as a requirement to install.

        :param parent_req_name: The name of the requirement that needed this
            added. The name is used because when multiple unnamed requirements
            resolve to the same name, we could otherwise end up with dependency
            links that point outside the Requirements set. parent_req must
            already be added. Note that None implies that this is a user
            supplied requirement, vs an inferred one.
        :return: Additional requirements to scan. That is either [] if
            the requirement is not applicable, or [install_req] if the
            requirement is applicable and has just been added.
        :raises InstallationError: on a duplicate user-supplied requirement,
            or when a link-based requirement collides with a constraint.
        """
        name = install_req.name
        # Environment-marker mismatch: skip entirely (nothing to scan).
        if not install_req.match_markers():
            logger.warning("Ignoring %s: markers %r don't match your "
                           "environment", install_req.name,
                           install_req.markers)
            return []

        # Propagate set-wide install options onto the individual requirement.
        install_req.as_egg = self.as_egg
        install_req.use_user_site = self.use_user_site
        install_req.target_dir = self.target_dir
        install_req.pycompile = self.pycompile
        if not name:
            # url or path requirement w/o an egg fragment
            self.unnamed_requirements.append(install_req)
            return [install_req]
        else:
            try:
                existing_req = self.get_requirement(name)
            except KeyError:
                existing_req = None
            # Two user-supplied (parent_req_name is None) non-constraint
            # requirements with the same extras is an error.
            if (parent_req_name is None and existing_req and not
                    existing_req.constraint and
                    existing_req.extras == install_req.extras):
                raise InstallationError(
                    'Double requirement given: %s (already in %s, name=%r)'
                    % (install_req, existing_req, name))
            if not existing_req:
                # Add requirement
                self.requirements[name] = install_req
                # FIXME: what about other normalizations? E.g., _ vs. -?
                if name.lower() != name:
                    self.requirement_aliases[name.lower()] = name
                result = [install_req]
            else:
                # Assume there's no need to scan, and that we've already
                # encountered this for scanning.
                result = []
                if not install_req.constraint and existing_req.constraint:
                    # A real requirement is being added on top of a
                    # constraint: link-based reqs can't be constrained to a
                    # version, so reject a mismatched link.
                    if (install_req.link and not (existing_req.link and
                       install_req.link.path == existing_req.link.path)):
                        self.reqs_to_cleanup.append(install_req)
                        raise InstallationError(
                            "Could not satisfy constraints for '%s': "
                            "installation from path or url cannot be "
                            "constrained to a version" % name)
                    # If we're now installing a constraint, mark the existing
                    # object for real installation.
                    existing_req.constraint = False
                    existing_req.extras = tuple(
                        sorted(set(existing_req.extras).union(
                            set(install_req.extras))))
                    logger.debug("Setting %s extras to: %s",
                                 existing_req, existing_req.extras)
                    # And now we need to scan this.
                    result = [existing_req]
                # Canonicalise to the already-added object for the backref
                # check below.
                install_req = existing_req
            if parent_req_name:
                parent_req = self.get_requirement(parent_req_name)
                self._dependencies[parent_req].append(install_req)
            return result
|
2014-01-12 01:50:11 +01:00
|
|
|
|
|
|
|
def has_requirement(self, project_name):
|
2015-06-29 07:40:57 +02:00
|
|
|
name = project_name.lower()
|
|
|
|
if (name in self.requirements and
|
|
|
|
not self.requirements[name].constraint or
|
|
|
|
name in self.requirement_aliases and
|
|
|
|
not self.requirements[self.requirement_aliases[name]].constraint):
|
|
|
|
return True
|
2014-01-12 01:50:11 +01:00
|
|
|
return False
|
|
|
|
|
|
|
|
@property
|
|
|
|
def has_requirements(self):
|
2015-06-02 05:39:10 +02:00
|
|
|
return list(req for req in self.requirements.values() if not
|
|
|
|
req.constraint) or self.unnamed_requirements
|
2014-01-12 01:50:11 +01:00
|
|
|
|
|
|
|
@property
|
|
|
|
def is_download(self):
|
|
|
|
if self.download_dir:
|
2015-09-29 21:31:27 +02:00
|
|
|
self.download_dir = expanduser(self.download_dir)
|
2014-01-12 01:50:11 +01:00
|
|
|
if os.path.exists(self.download_dir):
|
|
|
|
return True
|
|
|
|
else:
|
2014-08-31 01:52:28 +02:00
|
|
|
logger.critical('Could not find download directory')
|
2014-01-12 01:50:11 +01:00
|
|
|
raise InstallationError(
|
|
|
|
"Could not find or access download directory '%s'"
|
|
|
|
% display_path(self.download_dir))
|
|
|
|
return False
|
|
|
|
|
|
|
|
def get_requirement(self, project_name):
|
|
|
|
for name in project_name, project_name.lower():
|
|
|
|
if name in self.requirements:
|
|
|
|
return self.requirements[name]
|
|
|
|
if name in self.requirement_aliases:
|
|
|
|
return self.requirements[self.requirement_aliases[name]]
|
|
|
|
raise KeyError("No project with the name %r" % project_name)
|
|
|
|
|
|
|
|
def uninstall(self, auto_confirm=False):
|
|
|
|
for req in self.requirements.values():
|
2015-06-02 05:39:10 +02:00
|
|
|
if req.constraint:
|
|
|
|
continue
|
2014-01-12 01:50:11 +01:00
|
|
|
req.uninstall(auto_confirm=auto_confirm)
|
|
|
|
req.commit_uninstall()
|
|
|
|
|
2014-05-08 18:12:34 +02:00
|
|
|
    def prepare_files(self, finder):
        """
        Prepare process. Create temp directories, download and/or unpack files.

        :param finder: presumably a PackageFinder used to locate
            distributions -- only passed through to _prepare_file here.
        :raises InstallationError: when --egg is combined with hash-checking.
        :raises HashErrors: aggregated hash failures, raised once all
            requirements have been attempted.
        """
        # make the wheelhouse
        if self.wheel_download_dir:
            ensure_dir(self.wheel_download_dir)

        # If any top-level requirement has a hash specified, enter
        # hash-checking mode, which requires hashes from all.
        root_reqs = self.unnamed_requirements + self.requirements.values()
        require_hashes = (self.require_hashes or
                          any(req.has_hash_options for req in root_reqs))
        if require_hashes and self.as_egg:
            raise InstallationError(
                '--egg is not allowed with --require-hashes mode, since it '
                'delegates dependency resolution to setuptools and could thus '
                'result in installation of unhashed packages.')

        # Actually prepare the files, and collect any exceptions. Most hash
        # exceptions cannot be checked ahead of time, because
        # req.populate_link() needs to be called before we can make decisions
        # based on link type.
        # NOTE: discovered_reqs grows while chain() is being iterated, so
        # newly discovered dependencies are processed in the same loop.
        discovered_reqs = []
        hash_errors = HashErrors()
        for req in chain(root_reqs, discovered_reqs):
            try:
                discovered_reqs.extend(self._prepare_file(
                    finder,
                    req,
                    require_hashes=require_hashes,
                    ignore_dependencies=self.ignore_dependencies))
            except HashError as exc:
                # Tag the error with its requirement so the aggregate
                # report can say which req failed.
                exc.req = req
                hash_errors.append(exc)

        if hash_errors:
            raise hash_errors
|
|
|
|
|
2015-03-23 00:09:40 +01:00
|
|
|
    def _check_skip_installed(self, req_to_install, finder):
        """Check if req_to_install should be skipped.

        This will check if the req is installed, and whether we should upgrade
        or reinstall it, taking into account all the relevant user options.

        After calling this req_to_install will only have satisfied_by set to
        None if the req_to_install is to be upgraded/reinstalled etc. Any
        other value will be a dist recording the current thing installed that
        satisfies the requirement.

        Note that for vcs urls and the like we can't assess skipping in this
        routine - we simply identify that we need to pull the thing down,
        then later on it is pulled down and introspected to assess upgrade/
        reinstalls etc.

        :param req_to_install: The InstallRequirement being considered; may be
            mutated (``satisfied_by`` cleared, ``conflicts_with`` set).
        :param finder: PackageFinder used to look for a newer version when
            ``self.upgrade`` is set.
        :return: A text reason for why it was skipped, or None.
        """
        # Check whether to upgrade/reinstall this req or not.
        req_to_install.check_if_exists()
        if req_to_install.satisfied_by:
            skip_reason = 'satisfied (use --upgrade to upgrade)'
            if self.upgrade:
                best_installed = False
                # For link based requirements we have to pull the
                # tree down and inspect to assess the version #, so
                # its handled way down.
                if not (self.force_reinstall or req_to_install.link):
                    try:
                        finder.find_requirement(req_to_install, self.upgrade)
                    except BestVersionAlreadyInstalled:
                        # Nothing newer exists: keep the installed dist and
                        # report it as current rather than merely satisfied.
                        skip_reason = 'up-to-date'
                        best_installed = True
                    except DistributionNotFound:
                        # No distribution found, so we squash the
                        # error - it will be raised later when we
                        # re-try later to do the install.
                        # Why don't we just raise here?
                        pass

                if not best_installed:
                    # don't uninstall conflict if user install and
                    # conflict is not user install
                    if not (self.use_user_site and not
                            dist_in_usersite(req_to_install.satisfied_by)):
                        req_to_install.conflicts_with = \
                            req_to_install.satisfied_by
                    # Clearing satisfied_by signals to the caller that this
                    # requirement must be (re)installed.
                    req_to_install.satisfied_by = None
            return skip_reason
        else:
            return None
|
|
|
|
|
Add checks against requirements-file-dwelling hashes for most kinds of packages. Close #1175.
* Add --require-hashes option. This is handy in deployment scripts to force application authors to hash their requirements. It is also a convenient way to get pip to show computed hashes for a virgin, unhashed requirements file. Eventually, additions to `pip freeze` should fill a superset of this use case.
* In --require-hashes mode, at least one hash is required to match for each requirement.
* Option-based requirements (--sha256=...) turn on --require-hashes mode implicitly.
* Internet-derived URL-based hashes are "necessary but not sufficient": they do not satisfy --require-hashes mode when they match, but they are still used to guard against transmission errors.
* Other URL-based requirements (#md5=...) are treated just like flag-based ones, except they don't turn on --require-hashes.
* Complain informatively, with the most devastating errors first so you don't chase your tail all day only to run up against a brick wall at the end. This also means we don't complain that a hash is missing, only for the user to find, after fixing it, that we have no idea how to even compute a hash for that type of requirement.
* Complain about unpinned requirements when hash-checking mode is on, lest they cause the user surprise later.
* Complain about missing hashes.
* Complain about requirement types we don't know how to hash (like VCS ones and local dirs).
* Have InstallRequirement keep its original Link around (original_link) so we can differentiate between URL hashes from requirements files and ones downloaded from the (untrustworthy) internet.
* Remove test_download_hashes, which is obsolete. Similar coverage is provided in test_utils.TestHashes and the various hash cases in test_req.py.
2015-09-09 19:01:53 +02:00
|
|
|
    def _prepare_file(self,
                      finder,
                      req_to_install,
                      require_hashes=False,
                      ignore_dependencies=False):
        """Prepare a single requirement: fetch/unpack it and collect its
        dependencies.

        :param finder: PackageFinder used to locate a distribution when the
            requirement has no direct link yet.
        :param req_to_install: The InstallRequirement to prepare; mutated in
            place (``prepared``, ``satisfied_by``, ``conflicts_with``, link
            population, source dir).
        :param require_hashes: If True, enforce hash-checking mode: refuse
            unhashable requirement types and unpinned requirements, and
            demand a known-good hash for each download.
        :param ignore_dependencies: If True, do not recurse into this
            requirement's dependencies.
        :return: A list of additional InstallRequirements to also install.
        """
        # Tell user what we are doing for this requirement:
        # obtain (editable), skipping, processing (local url), collecting
        # (remote url or package name)
        if req_to_install.constraint or req_to_install.prepared:
            return []

        # Mark early so a re-entrant visit of the same requirement is a no-op.
        req_to_install.prepared = True

        # ###################### #
        # # print log messages # #
        # ###################### #
        if req_to_install.editable:
            logger.info('Obtaining %s', req_to_install)
        else:
            # satisfied_by is only evaluated by calling _check_skip_installed,
            # so it must be None here.
            assert req_to_install.satisfied_by is None
            if not self.ignore_installed:
                skip_reason = self._check_skip_installed(
                    req_to_install, finder)

            if req_to_install.satisfied_by:
                assert skip_reason is not None, (
                    '_check_skip_installed returned None but '
                    'req_to_install.satisfied_by is set to %r'
                    % (req_to_install.satisfied_by,))
                logger.info(
                    'Requirement already %s: %s', skip_reason,
                    req_to_install)
            else:
                if (req_to_install.link and
                        req_to_install.link.scheme == 'file'):
                    path = url_to_path(req_to_install.link.url)
                    logger.info('Processing %s', display_path(path))
                else:
                    logger.info('Collecting %s', req_to_install)

        with indent_log():
            # ################################ #
            # # vcs update or unpack archive # #
            # ################################ #
            if req_to_install.editable:
                # Editable requirements have no single artifact to hash, so
                # they are incompatible with hash-checking mode.
                if require_hashes:
                    raise InstallationError(
                        'The editable requirement %s cannot be installed when '
                        'requiring hashes, because there is no single file to '
                        'hash.' % req_to_install)
                req_to_install.ensure_has_source_dir(self.src_dir)
                req_to_install.update_editable(not self.is_download)
                abstract_dist = make_abstract_dist(req_to_install)
                abstract_dist.prep_for_dist()
                if self.is_download:
                    req_to_install.archive(self.download_dir)
            elif req_to_install.satisfied_by:
                if require_hashes:
                    logger.debug(
                        'Since it is already installed, we are trusting this '
                        'package without checking its hash. To ensure a '
                        'completely repeatable environment, install into an '
                        'empty virtualenv.')
                abstract_dist = Installed(req_to_install)
            else:
                # @@ if filesystem packages are not marked
                # editable in a req, a non deterministic error
                # occurs when the script attempts to unpack the
                # build directory
                req_to_install.ensure_has_source_dir(self.build_dir)
                # If a checkout exists, it's unwise to keep going. version
                # inconsistencies are logged later, but do not fail the
                # installation.
                # FIXME: this won't upgrade when there's an existing
                # package unpacked in `req_to_install.source_dir`
                if os.path.exists(
                        os.path.join(req_to_install.source_dir, 'setup.py')):
                    raise PreviousBuildDirError(
                        "pip can't proceed with requirements '%s' due to a"
                        " pre-existing build directory (%s). This is "
                        "likely due to a previous installation that failed"
                        ". pip is being responsible and not assuming it "
                        "can delete this. Please delete it and try again."
                        % (req_to_install, req_to_install.source_dir)
                    )
                req_to_install.populate_link(
                    finder, self.upgrade, require_hashes)
                # We can't hit this spot and have populate_link return None.
                # req_to_install.satisfied_by is None here (because we're
                # guarded) and upgrade has no impact except when satisfied_by
                # is not None.
                # Then inside find_requirement existing_applicable -> False
                # If no new versions are found, DistributionNotFound is raised,
                # otherwise a result is guaranteed.
                assert req_to_install.link
                link = req_to_install.link

                # Now that we have the real link, we can tell what kind of
                # requirements we have and raise some more informative errors
                # than otherwise. (For example, we can raise VcsHashUnsupported
                # for a VCS URL rather than HashMissing.)
                if require_hashes:
                    # We could check these first 2 conditions inside
                    # unpack_url and save repetition of conditions, but then
                    # we would report less-useful error messages for
                    # unhashable requirements, complaining that there's no
                    # hash provided.
                    if is_vcs_url(link):
                        raise VcsHashUnsupported()
                    elif is_file_url(link) and is_dir_url(link):
                        raise DirectoryUrlHashUnsupported()
                    if (not req_to_install.original_link and
                            not req_to_install.is_pinned):
                        # Unpinned packages are asking for trouble when a new
                        # version is uploaded. This isn't a security check, but
                        # it saves users a surprising hash mismatch in the
                        # future.
                        #
                        # file:/// URLs aren't pinnable, so don't complain
                        # about them not being pinned.
                        raise HashUnpinned()
                # Internet-derived hashes are trusted only outside
                # hash-checking mode; in that mode they merely guard against
                # transmission errors.
                hashes = req_to_install.hashes(
                    trust_internet=not require_hashes)
                if require_hashes and not hashes:
                    # Known-good hashes are missing for this requirement, so
                    # shim it with a facade object that will provoke hash
                    # computation and then raise a HashMissing exception
                    # showing the user what the hash should be.
                    hashes = MissingHashes()

                try:
                    download_dir = self.download_dir
                    # We always delete unpacked sdists after pip ran.
                    autodelete_unpacked = True
                    if req_to_install.link.is_wheel \
                            and self.wheel_download_dir:
                        # when doing 'pip wheel` we download wheels to a
                        # dedicated dir.
                        download_dir = self.wheel_download_dir
                    if req_to_install.link.is_wheel:
                        if download_dir:
                            # When downloading, we only unpack wheels to get
                            # metadata.
                            autodelete_unpacked = True
                        else:
                            # When installing a wheel, we use the unpacked
                            # wheel.
                            autodelete_unpacked = False
                    unpack_url(
                        req_to_install.link, req_to_install.source_dir,
                        download_dir, autodelete_unpacked,
                        session=self.session, hashes=hashes)
                except requests.HTTPError as exc:
                    logger.critical(
                        'Could not install requirement %s because '
                        'of error %s',
                        req_to_install,
                        exc,
                    )
                    raise InstallationError(
                        'Could not install requirement %s because '
                        'of HTTP error %s for URL %s' %
                        (req_to_install, exc, req_to_install.link)
                    )
                abstract_dist = make_abstract_dist(req_to_install)
                abstract_dist.prep_for_dist()
                if self.is_download:
                    # Make a .zip of the source_dir we already created.
                    if req_to_install.link.scheme in vcs.all_schemes:
                        req_to_install.archive(self.download_dir)
                # req_to_install.req is only avail after unpack for URL
                # pkgs repeat check_if_exists to uninstall-on-upgrade
                # (#14)
                if not self.ignore_installed:
                    req_to_install.check_if_exists()
                if req_to_install.satisfied_by:
                    if self.upgrade or self.ignore_installed:
                        # don't uninstall conflict if user install and
                        # conflict is not user install
                        if not (self.use_user_site and not
                                dist_in_usersite(
                                    req_to_install.satisfied_by)):
                            req_to_install.conflicts_with = \
                                req_to_install.satisfied_by
                        req_to_install.satisfied_by = None
                    else:
                        logger.info(
                            'Requirement already satisfied (use '
                            '--upgrade to upgrade): %s',
                            req_to_install,
                        )

            # ###################### #
            # # parse dependencies # #
            # ###################### #
            dist = abstract_dist.dist(finder)
            more_reqs = []

            def add_req(subreq):
                # Wrap the distribution's dependency in an InstallRequirement
                # parented on req_to_install and register it with the set.
                sub_install_req = InstallRequirement(
                    str(subreq),
                    req_to_install,
                    isolated=self.isolated,
                    wheel_cache=self._wheel_cache,
                )
                more_reqs.extend(self.add_requirement(
                    sub_install_req, req_to_install.name))

            # We add req_to_install before its dependencies, so that we
            # can refer to it when adding dependencies.
            if not self.has_requirement(req_to_install.name):
                # 'unnamed' requirements will get added here
                self.add_requirement(req_to_install, None)

            if not ignore_dependencies:
                if (req_to_install.extras):
                    logger.debug(
                        "Installing extra requirements: %r",
                        ','.join(req_to_install.extras),
                    )
                # Warn about requested extras the distribution doesn't define.
                missing_requested = sorted(
                    set(req_to_install.extras) - set(dist.extras)
                )
                for missing in missing_requested:
                    logger.warning(
                        '%s does not provide the extra \'%s\'',
                        dist, missing
                    )

                available_requested = sorted(
                    set(dist.extras) & set(req_to_install.extras)
                )
                for subreq in dist.requires(available_requested):
                    add_req(subreq)

            # cleanup tmp src
            self.reqs_to_cleanup.append(req_to_install)

            if not req_to_install.editable and not req_to_install.satisfied_by:
                # XXX: --no-install leads this to report 'Successfully
                # downloaded' for only non-editable reqs, even though we took
                # action on them.
                self.successfully_downloaded.append(req_to_install)

        return more_reqs
|
2014-05-08 18:12:34 +02:00
|
|
|
|
|
|
|
def cleanup_files(self):
|
2014-01-12 01:50:11 +01:00
|
|
|
"""Clean up files, remove builds."""
|
2014-12-12 18:02:23 +01:00
|
|
|
logger.debug('Cleaning up...')
|
2014-08-31 01:52:28 +02:00
|
|
|
with indent_log():
|
|
|
|
for req in self.reqs_to_cleanup:
|
|
|
|
req.remove_temporary_source()
|
2014-01-12 01:50:11 +01:00
|
|
|
|
2015-03-25 01:53:10 +01:00
|
|
|
def _to_install(self):
|
|
|
|
"""Create the installation order.
|
|
|
|
|
|
|
|
The installation order is topological - requirements are installed
|
|
|
|
before the requiring thing. We break cycles at an arbitrary point,
|
|
|
|
and make no other guarantees.
|
|
|
|
"""
|
|
|
|
# The current implementation, which we may change at any point
|
|
|
|
# installs the user specified things in the order given, except when
|
2015-04-02 01:55:39 +02:00
|
|
|
# dependencies must come earlier to achieve topological order.
|
2015-03-25 01:53:10 +01:00
|
|
|
order = []
|
|
|
|
ordered_reqs = set()
|
|
|
|
|
|
|
|
def schedule(req):
|
|
|
|
if req.satisfied_by or req in ordered_reqs:
|
|
|
|
return
|
2015-06-02 05:39:10 +02:00
|
|
|
if req.constraint:
|
|
|
|
return
|
2015-03-25 01:53:10 +01:00
|
|
|
ordered_reqs.add(req)
|
|
|
|
for dep in self._dependencies[req]:
|
|
|
|
schedule(dep)
|
|
|
|
order.append(req)
|
|
|
|
for install_req in self.requirements.values():
|
|
|
|
schedule(install_req)
|
|
|
|
return order
|
|
|
|
|
2014-01-12 01:50:11 +01:00
|
|
|
def install(self, install_options, global_options=(), *args, **kwargs):
|
2014-01-27 15:07:10 +01:00
|
|
|
"""
|
|
|
|
Install everything in this set (after having downloaded and unpacked
|
|
|
|
the packages)
|
|
|
|
"""
|
2015-03-25 01:53:10 +01:00
|
|
|
to_install = self._to_install()
|
2014-01-12 01:50:11 +01:00
|
|
|
|
|
|
|
if to_install:
|
2014-08-31 01:52:28 +02:00
|
|
|
logger.info(
|
|
|
|
'Installing collected packages: %s',
|
|
|
|
', '.join([req.name for req in to_install]),
|
2014-01-27 15:07:10 +01:00
|
|
|
)
|
2014-08-31 01:52:28 +02:00
|
|
|
|
|
|
|
with indent_log():
|
2014-01-12 01:50:11 +01:00
|
|
|
for requirement in to_install:
|
|
|
|
if requirement.conflicts_with:
|
2014-08-31 01:52:28 +02:00
|
|
|
logger.info(
|
|
|
|
'Found existing installation: %s',
|
|
|
|
requirement.conflicts_with,
|
|
|
|
)
|
|
|
|
with indent_log():
|
2014-01-12 01:50:11 +01:00
|
|
|
requirement.uninstall(auto_confirm=True)
|
|
|
|
try:
|
2014-01-27 15:07:10 +01:00
|
|
|
requirement.install(
|
|
|
|
install_options,
|
|
|
|
global_options,
|
|
|
|
*args,
|
|
|
|
**kwargs
|
|
|
|
)
|
2014-01-12 01:50:11 +01:00
|
|
|
except:
|
|
|
|
# if install did not succeed, rollback previous uninstall
|
2015-02-24 13:46:10 +01:00
|
|
|
if (requirement.conflicts_with and not
|
|
|
|
requirement.install_succeeded):
|
2014-01-12 01:50:11 +01:00
|
|
|
requirement.rollback_uninstall()
|
|
|
|
raise
|
|
|
|
else:
|
2015-02-24 13:46:10 +01:00
|
|
|
if (requirement.conflicts_with and
|
|
|
|
requirement.install_succeeded):
|
2014-01-12 01:50:11 +01:00
|
|
|
requirement.commit_uninstall()
|
|
|
|
requirement.remove_temporary_source()
|
2014-08-31 01:52:28 +02:00
|
|
|
|
2014-01-12 01:50:11 +01:00
|
|
|
self.successfully_installed = to_install
|