2014-08-31 01:52:28 +02:00
|
|
|
from __future__ import absolute_import
|
|
|
|
|
|
|
|
import logging
|
2014-01-12 07:50:07 +01:00
|
|
|
import os
|
|
|
|
import re
|
|
|
|
import shutil
|
|
|
|
import sys
|
|
|
|
import tempfile
|
2015-10-22 23:14:30 +02:00
|
|
|
import traceback
|
2016-01-20 06:50:18 +01:00
|
|
|
import warnings
|
2014-01-12 07:50:07 +01:00
|
|
|
import zipfile
|
2014-08-31 01:52:28 +02:00
|
|
|
|
2016-01-21 02:43:54 +01:00
|
|
|
from distutils import sysconfig
|
2014-01-12 07:50:07 +01:00
|
|
|
from distutils.util import change_root
|
|
|
|
from email.parser import FeedParser
|
|
|
|
|
2014-02-21 02:33:59 +01:00
|
|
|
from pip._vendor import pkg_resources, six
|
2015-10-23 22:59:38 +02:00
|
|
|
from pip._vendor.packaging import specifiers
|
2016-04-25 02:55:41 +02:00
|
|
|
from pip._vendor.packaging.markers import Marker
|
2015-11-12 00:51:46 +01:00
|
|
|
from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
|
2016-02-09 23:58:08 +01:00
|
|
|
from pip._vendor.packaging.utils import canonicalize_name
|
2016-05-17 22:32:43 +02:00
|
|
|
from pip._vendor.packaging.version import Version, parse as parse_version
|
2014-06-06 20:21:51 +02:00
|
|
|
from pip._vendor.six.moves import configparser
|
2014-09-12 00:40:45 +02:00
|
|
|
|
|
|
|
import pip.wheel
|
|
|
|
|
2016-01-21 02:43:54 +01:00
|
|
|
from pip.compat import native_str, get_stdlib, WINDOWS
|
2014-01-12 07:50:07 +01:00
|
|
|
from pip.download import is_url, url_to_path, path_to_url, is_archive_file
|
2014-01-27 15:07:10 +01:00
|
|
|
from pip.exceptions import (
|
2016-03-10 03:55:20 +01:00
|
|
|
InstallationError, UninstallationError,
|
2014-01-27 15:07:10 +01:00
|
|
|
)
|
|
|
|
from pip.locations import (
|
|
|
|
bin_py, running_under_virtualenv, PIP_DELETE_MARKER_FILENAME, bin_user,
|
|
|
|
)
|
2014-08-31 01:52:28 +02:00
|
|
|
from pip.utils import (
|
2014-01-27 15:07:10 +01:00
|
|
|
display_path, rmtree, ask_path_exists, backup_dir, is_installable_dir,
|
2015-09-01 14:16:12 +02:00
|
|
|
dist_in_usersite, dist_in_site_packages, egg_link_path,
|
2015-03-31 03:40:30 +02:00
|
|
|
call_subprocess, read_text_file, FakeFile, _make_build_dir, ensure_dir,
|
2016-02-09 23:58:08 +01:00
|
|
|
get_installed_version, normalize_path, dist_is_local,
|
2014-01-27 15:07:10 +01:00
|
|
|
)
|
2016-01-20 06:50:18 +01:00
|
|
|
|
Add checks against requirements-file-dwelling hashes for most kinds of packages. Close #1175.
* Add --require-hashes option. This is handy in deployment scripts to force application authors to hash their requirements. It is also a convenient way to get pip to show computed hashes for a virgin, unhashed requirements file. Eventually, additions to `pip freeze` should fill a superset of this use case.
* In --require-hashes mode, at least one hash is required to match for each requirement.
* Option-based requirements (--sha256=...) turn on --require-hashes mode implicitly.
* Internet-derived URL-based hashes are "necessary but not sufficient": they do not satisfy --require-hashes mode when they match, but they are still used to guard against transmission errors.
* Other URL-based requirements (#md5=...) are treated just like flag-based ones, except they don't turn on --require-hashes.
* Complain informatively, with the most devastating errors first so you don't chase your tail all day only to run up against a brick wall at the end. This also means we don't complain that a hash is missing, only for the user to find, after fixing it, that we have no idea how to even compute a hash for that type of requirement.
* Complain about unpinned requirements when hash-checking mode is on, lest they cause the user surprise later.
* Complain about missing hashes.
* Complain about requirement types we don't know how to hash (like VCS ones and local dirs).
* Have InstallRequirement keep its original Link around (original_link) so we can differentiate between URL hashes from requirements files and ones downloaded from the (untrustworthy) internet.
* Remove test_download_hashes, which is obsolete. Similar coverage is provided in test_utils.TestHashes and the various hash cases in test_req.py.
2015-09-09 19:01:53 +02:00
|
|
|
from pip.utils.hashes import Hashes
|
2016-02-19 20:15:03 +01:00
|
|
|
from pip.utils.deprecation import RemovedInPip9Warning, RemovedInPip10Warning
|
2014-08-31 01:52:28 +02:00
|
|
|
from pip.utils.logging import indent_log
|
2015-11-26 09:36:18 +01:00
|
|
|
from pip.utils.setuptools_build import SETUPTOOLS_SHIM
|
2015-11-04 09:06:50 +01:00
|
|
|
from pip.utils.ui import open_spinner
|
2014-01-12 19:05:11 +01:00
|
|
|
from pip.req.req_uninstall import UninstallPathSet
|
2014-01-12 07:50:07 +01:00
|
|
|
from pip.vcs import vcs
|
2014-12-28 23:52:32 +01:00
|
|
|
from pip.wheel import move_wheel_files, Wheel
|
2014-01-12 07:50:07 +01:00
|
|
|
|
2014-01-12 01:50:11 +01:00
|
|
|
|
2014-08-31 01:52:28 +02:00
|
|
|
# Module-level logger for this requirement-handling module.
logger = logging.getLogger(__name__)


# All comparison operator strings understood by packaging specifiers
# (e.g. '==', '>=', '~='); used to build a helpful error message when a
# requirement string contains '=' but no valid operator.
operators = specifiers.Specifier._operators.keys()
|
|
|
|
|
2014-08-31 01:52:28 +02:00
|
|
|
|
2015-05-14 18:24:49 +02:00
|
|
|
def _strip_extras(path):
|
|
|
|
m = re.match(r'^(.+)(\[[^\]]+\])$', path)
|
|
|
|
extras = None
|
|
|
|
if m:
|
|
|
|
path_no_extras = m.group(1)
|
|
|
|
extras = m.group(2)
|
|
|
|
else:
|
|
|
|
path_no_extras = path
|
|
|
|
|
|
|
|
return path_no_extras, extras
|
|
|
|
|
|
|
|
|
2014-01-12 01:50:11 +01:00
|
|
|
class InstallRequirement(object):
|
|
|
|
|
|
|
|
    def __init__(self, req, comes_from, source_dir=None, editable=False,
                 link=None, as_egg=False, update=True,
                 pycompile=True, markers=None, isolated=False, options=None,
                 wheel_cache=None, constraint=False):
        """Create an InstallRequirement.

        :param req: a packaging ``Requirement``, a requirement string to be
            parsed, or None for "unnamed" requirements known only by link.
        :param comes_from: the string or InstallRequirement through which
            this requirement was discovered (used in messages).
        :param source_dir: local directory holding the source tree, if any.
        :param editable: True for editable ("-e") requirements.
        :param link: Link to obtain the distribution from; also kept as
            ``original_link`` (see below).
        :param markers: environment markers, as the raw string split off the
            requirement line (may be None).
        :param options: per-requirement options dict (e.g. install options).
        :param wheel_cache: optional wheel cache consulted by populate_link.
        :param constraint: True when this line came from a constraints file.
        :raises InstallationError: if ``req`` is a string that cannot be
            parsed as a requirement.
        """
        self.extras = ()
        if isinstance(req, six.string_types):
            try:
                req = Requirement(req)
            except InvalidRequirement:
                # Build the most helpful hint we can for the parse failure.
                if os.path.sep in req:
                    add_msg = "It looks like a path. Does it exist ?"
                elif '=' in req and not any(op in req for op in operators):
                    add_msg = "= is not a valid operator. Did you mean == ?"
                else:
                    add_msg = traceback.format_exc()
                raise InstallationError(
                    "Invalid requirement: '%s'\n%s" % (req, add_msg))
            self.extras = req.extras

        self.req = req
        self.comes_from = comes_from
        self.constraint = constraint
        self.source_dir = source_dir
        self.editable = editable

        self._wheel_cache = wheel_cache
        # Keep the original Link around so hashes coming from a requirements
        # file can be told apart from ones attached later (e.g. by the
        # wheel cache in populate_link).
        self.link = self.original_link = link
        self.as_egg = as_egg
        self.markers = markers
        self._egg_info_path = None
        # This holds the pkg_resources.Distribution object if this requirement
        # is already available:
        self.satisfied_by = None
        # This hold the pkg_resources.Distribution object if this requirement
        # conflicts with another installed distribution:
        self.conflicts_with = None
        # Temporary build location
        self._temp_build_dir = None
        # Used to store the global directory where the _temp_build_dir should
        # have been created. Cf _correct_build_location method.
        self._ideal_build_dir = None
        # True if the editable should be updated:
        self.update = update
        # Set to True after successful installation
        self.install_succeeded = None
        # UninstallPathSet of uninstalled distribution (for possible rollback)
        self.uninstalled = None
        # Set True if a legitimate do-nothing-on-uninstall has happened - e.g.
        # system site packages, stdlib packages.
        self.nothing_to_uninstall = False
        self.use_user_site = False
        self.target_dir = None
        self.options = options if options else {}
        self.pycompile = pycompile
        # Set to True after successful preparation of this requirement
        self.prepared = False

        self.isolated = isolated
2014-01-12 01:50:11 +01:00
|
|
|
@classmethod
|
2014-12-11 15:44:53 +01:00
|
|
|
def from_editable(cls, editable_req, comes_from=None, default_vcs=None,
|
2015-06-02 05:39:10 +02:00
|
|
|
isolated=False, options=None, wheel_cache=None,
|
|
|
|
constraint=False):
|
2014-12-28 16:50:16 +01:00
|
|
|
from pip.index import Link
|
|
|
|
|
2016-02-09 10:40:48 +01:00
|
|
|
name, url, extras_override = parse_editable(
|
2014-12-07 23:49:26 +01:00
|
|
|
editable_req, default_vcs)
|
2014-01-12 01:50:11 +01:00
|
|
|
if url.startswith('file:'):
|
|
|
|
source_dir = url_to_path(url)
|
|
|
|
else:
|
|
|
|
source_dir = None
|
|
|
|
|
|
|
|
res = cls(name, comes_from, source_dir=source_dir,
|
|
|
|
editable=True,
|
2014-12-28 16:50:16 +01:00
|
|
|
link=Link(url),
|
2015-06-02 05:39:10 +02:00
|
|
|
constraint=constraint,
|
2015-03-19 02:34:56 +01:00
|
|
|
isolated=isolated,
|
2015-03-30 23:44:02 +02:00
|
|
|
options=options if options else {},
|
2015-04-20 06:43:02 +02:00
|
|
|
wheel_cache=wheel_cache)
|
2014-01-12 01:50:11 +01:00
|
|
|
|
|
|
|
if extras_override is not None:
|
|
|
|
res.extras = extras_override
|
|
|
|
|
|
|
|
return res
|
|
|
|
|
|
|
|
    @classmethod
    def from_line(
            cls, name, comes_from=None, isolated=False, options=None,
            wheel_cache=None, constraint=False):
        """Creates an InstallRequirement from a name, which might be a
        requirement, directory containing 'setup.py', filename, or URL.
        """
        from pip.index import Link

        # Environment markers are split off the line first. For URLs the
        # separator must be '; ' (with a space) because ';' alone can
        # legitimately appear inside a URL.
        if is_url(name):
            marker_sep = '; '
        else:
            marker_sep = ';'
        if marker_sep in name:
            name, markers = name.split(marker_sep, 1)
            markers = markers.strip()
            if not markers:
                markers = None
        else:
            markers = None
        name = name.strip()
        req = None
        path = os.path.normpath(os.path.abspath(name))
        link = None
        extras = None

        if is_url(name):
            link = Link(name)
        else:
            # Not a URL: it may be a local directory or archive, possibly
            # carrying an extras suffix like 'path[extra1,extra2]'.
            p, extras = _strip_extras(path)
            if (os.path.isdir(p) and
                    (os.path.sep in name or name.startswith('.'))):

                if not is_installable_dir(p):
                    raise InstallationError(
                        "Directory %r is not installable. File 'setup.py' "
                        "not found." % name
                    )
                link = Link(path_to_url(p))
            elif is_archive_file(p):
                # Looks like an sdist/wheel filename; warn (but proceed)
                # if the file is missing on disk.
                if not os.path.isfile(p):
                    logger.warning(
                        'Requirement %r looks like a filename, but the '
                        'file does not exist',
                        name
                    )
                link = Link(path_to_url(p))

        # it's a local file, dir, or url
        if link:
            # Handle relative file URLs
            if link.scheme == 'file' and re.search(r'\.\./', link.url):
                link = Link(
                    path_to_url(os.path.normpath(os.path.abspath(link.path))))
            # wheel file
            if link.is_wheel:
                wheel = Wheel(link.filename)  # can raise InvalidWheelFilename
                req = "%s==%s" % (wheel.name, wheel.version)
            else:
                # set the req to the egg fragment. when it's not there, this
                # will become an 'unnamed' requirement
                req = link.egg_fragment

        # a requirement specifier
        else:
            req = name

        options = options if options else {}
        res = cls(req, comes_from, link=link, markers=markers,
                  isolated=isolated, options=options,
                  wheel_cache=wheel_cache, constraint=constraint)

        if extras:
            # Parse the extras suffix through a placeholder requirement to
            # reuse packaging's extras parsing.
            res.extras = Requirement('placeholder' + extras).extras

        return res
|
2014-01-12 01:50:11 +01:00
|
|
|
|
|
|
|
def __str__(self):
|
|
|
|
if self.req:
|
|
|
|
s = str(self.req)
|
2014-12-28 16:50:16 +01:00
|
|
|
if self.link:
|
|
|
|
s += ' from %s' % self.link.url
|
2014-01-12 01:50:11 +01:00
|
|
|
else:
|
2014-12-28 16:50:16 +01:00
|
|
|
s = self.link.url if self.link else None
|
2014-01-12 01:50:11 +01:00
|
|
|
if self.satisfied_by is not None:
|
|
|
|
s += ' in %s' % display_path(self.satisfied_by.location)
|
|
|
|
if self.comes_from:
|
2014-06-06 20:21:51 +02:00
|
|
|
if isinstance(self.comes_from, six.string_types):
|
2014-01-12 01:50:11 +01:00
|
|
|
comes_from = self.comes_from
|
|
|
|
else:
|
|
|
|
comes_from = self.comes_from.from_path()
|
|
|
|
if comes_from:
|
|
|
|
s += ' (from %s)' % comes_from
|
|
|
|
return s
|
|
|
|
|
2015-03-06 16:58:34 +01:00
|
|
|
def __repr__(self):
|
2015-03-13 23:06:58 +01:00
|
|
|
return '<%s object: %s editable=%r>' % (
|
|
|
|
self.__class__.__name__, str(self), self.editable)
|
2015-03-06 16:58:34 +01:00
|
|
|
|
Fix false hash mismatches when installing a package that has a cached wheel.
This would occur when, for example, installing from a requirements file that references a certain hashed sdist, a common situation.
As of pip 7, pip always tries to build a wheel for each requirement (if one wasn't provided directly) and installs from that. The way this was implemented, InstallRequirement.link pointed to the cached wheel, which obviously had a different hash than the index-sourced archive, so spurious mismatch errors would result.
Now we no longer read from the wheel cache in hash-checking mode.
Make populate_link(), rather than the `link` setter, responsible for mapping InstallRequirement.link to a cached wheel. populate_link() isn't called until until prepare_files(). At that point, when we've examined all InstallRequirements and their potential --hash options, we know whether we should be requiring hashes and thus whether to use the wheel cache at all.
The only place that sets InstallRequirement.link other than InstallRequirement itself is pip.wheel, which does so long after hashes have been checked, when it's unpacking the wheel it just built, so it won't cause spurious hash mismatches.
2015-10-16 21:58:59 +02:00
|
|
|
    def populate_link(self, finder, upgrade, require_hashes):
        """Ensure that if a link can be found for this, that it is found.

        Note that self.link may still be None - if Upgrade is False and the
        requirement is already installed.

        If require_hashes is True, don't use the wheel cache, because cached
        wheels, always built locally, have different hashes than the files
        downloaded from the index server and thus throw false hash mismatches.
        Furthermore, cached wheels at present have undeterministic contents due
        to file modification times.

        :param finder: PackageFinder used to resolve a link when none is set.
        :param upgrade: passed through to finder.find_requirement.
        :param require_hashes: True when hash-checking mode is active.
        """
        if self.link is None:
            self.link = finder.find_requirement(self, upgrade)
        if self._wheel_cache is not None and not require_hashes:
            # Prefer a locally cached wheel over the resolved link when one
            # exists for this requirement.
            old_link = self.link
            self.link = self._wheel_cache.cached_wheel(self.link, self.name)
            if old_link != self.link:
                logger.debug('Using cached wheel link: %s', self.link)
|
2015-03-30 23:44:02 +02:00
|
|
|
|
2014-07-04 00:56:26 +02:00
|
|
|
    @property
    def specifier(self):
        """The version specifier set of the underlying requirement."""
        return self.req.specifier
|
|
|
|
|
Add checks against requirements-file-dwelling hashes for most kinds of packages. Close #1175.
* Add --require-hashes option. This is handy in deployment scripts to force application authors to hash their requirements. It is also a convenient way to get pip to show computed hashes for a virgin, unhashed requirements file. Eventually, additions to `pip freeze` should fill a superset of this use case.
* In --require-hashes mode, at least one hash is required to match for each requirement.
* Option-based requirements (--sha256=...) turn on --require-hashes mode implicitly.
* Internet-derived URL-based hashes are "necessary but not sufficient": they do not satisfy --require-hashes mode when they match, but they are still used to guard against transmission errors.
* Other URL-based requirements (#md5=...) are treated just like flag-based ones, except they don't turn on --require-hashes.
* Complain informatively, with the most devastating errors first so you don't chase your tail all day only to run up against a brick wall at the end. This also means we don't complain that a hash is missing, only for the user to find, after fixing it, that we have no idea how to even compute a hash for that type of requirement.
* Complain about unpinned requirements when hash-checking mode is on, lest they cause the user surprise later.
* Complain about missing hashes.
* Complain about requirement types we don't know how to hash (like VCS ones and local dirs).
* Have InstallRequirement keep its original Link around (original_link) so we can differentiate between URL hashes from requirements files and ones downloaded from the (untrustworthy) internet.
* Remove test_download_hashes, which is obsolete. Similar coverage is provided in test_utils.TestHashes and the various hash cases in test_req.py.
2015-09-09 19:01:53 +02:00
|
|
|
@property
|
|
|
|
def is_pinned(self):
|
|
|
|
"""Return whether I am pinned to an exact version.
|
|
|
|
|
|
|
|
For example, some-package==1.2 is pinned; some-package>1.2 is not.
|
|
|
|
"""
|
|
|
|
specifiers = self.specifier
|
2015-10-11 16:13:13 +02:00
|
|
|
return (len(specifiers) == 1 and
|
|
|
|
next(iter(specifiers)).operator in ('==', '==='))
|
Add checks against requirements-file-dwelling hashes for most kinds of packages. Close #1175.
* Add --require-hashes option. This is handy in deployment scripts to force application authors to hash their requirements. It is also a convenient way to get pip to show computed hashes for a virgin, unhashed requirements file. Eventually, additions to `pip freeze` should fill a superset of this use case.
* In --require-hashes mode, at least one hash is required to match for each requirement.
* Option-based requirements (--sha256=...) turn on --require-hashes mode implicitly.
* Internet-derived URL-based hashes are "necessary but not sufficient": they do not satisfy --require-hashes mode when they match, but they are still used to guard against transmission errors.
* Other URL-based requirements (#md5=...) are treated just like flag-based ones, except they don't turn on --require-hashes.
* Complain informatively, with the most devastating errors first so you don't chase your tail all day only to run up against a brick wall at the end. This also means we don't complain that a hash is missing, only for the user to find, after fixing it, that we have no idea how to even compute a hash for that type of requirement.
* Complain about unpinned requirements when hash-checking mode is on, lest they cause the user surprise later.
* Complain about missing hashes.
* Complain about requirement types we don't know how to hash (like VCS ones and local dirs).
* Have InstallRequirement keep its original Link around (original_link) so we can differentiate between URL hashes from requirements files and ones downloaded from the (untrustworthy) internet.
* Remove test_download_hashes, which is obsolete. Similar coverage is provided in test_utils.TestHashes and the various hash cases in test_req.py.
2015-09-09 19:01:53 +02:00
|
|
|
|
2014-01-12 01:50:11 +01:00
|
|
|
def from_path(self):
|
|
|
|
if self.req is None:
|
|
|
|
return None
|
|
|
|
s = str(self.req)
|
|
|
|
if self.comes_from:
|
2014-06-06 20:21:51 +02:00
|
|
|
if isinstance(self.comes_from, six.string_types):
|
2014-01-12 01:50:11 +01:00
|
|
|
comes_from = self.comes_from
|
|
|
|
else:
|
|
|
|
comes_from = self.comes_from.from_path()
|
|
|
|
if comes_from:
|
|
|
|
s += '->' + comes_from
|
|
|
|
return s
|
|
|
|
|
2014-12-17 22:39:14 +01:00
|
|
|
    def build_location(self, build_dir):
        """Return the directory this requirement should be built in.

        For named requirements this is ``build_dir/<name>`` (creating
        build_dir if needed). For unnamed requirements a temporary
        directory is created and ``build_dir`` is remembered as
        ``_ideal_build_dir`` so _correct_build_location can relocate the
        build once the name is known.
        """
        if self._temp_build_dir is not None:
            # Already decided: reuse the previous answer.
            return self._temp_build_dir
        if self.req is None:
            # for requirement via a path to a directory: the name of the
            # package is not available yet so we create a temp directory
            # Once run_egg_info will have run, we'll be able
            # to fix it via _correct_build_location
            # Some systems have /tmp as a symlink which confuses custom
            # builds (such as numpy). Thus, we ensure that the real path
            # is returned.
            self._temp_build_dir = os.path.realpath(
                tempfile.mkdtemp('-build', 'pip-')
            )
            self._ideal_build_dir = build_dir
            return self._temp_build_dir
        if self.editable:
            name = self.name.lower()
        else:
            name = self.name
        # FIXME: Is there a better place to create the build_dir? (hg and bzr
        # need this)
        if not os.path.exists(build_dir):
            logger.debug('Creating directory %s', build_dir)
            _make_build_dir(build_dir)
        return os.path.join(build_dir, name)
|
|
|
|
|
2014-12-21 14:17:37 +01:00
|
|
|
    def _correct_build_location(self):
        """Move self._temp_build_dir to self._ideal_build_dir/self.req.name

        For some requirements (e.g. a path to a directory), the name of the
        package is not available until we run egg_info, so the build_location
        will return a temporary directory and store the _ideal_build_dir.

        This is only called by self.egg_info_path to fix the temporary build
        directory.
        """
        if self.source_dir is not None:
            # Nothing to fix: we already have a definitive source location.
            return
        assert self.req is not None
        assert self._temp_build_dir
        assert self._ideal_build_dir
        old_location = self._temp_build_dir
        # Clear the temp dir so build_location computes the named location.
        self._temp_build_dir = None
        new_location = self.build_location(self._ideal_build_dir)
        if os.path.exists(new_location):
            raise InstallationError(
                'A package already exists in %s; please remove it to continue'
                % display_path(new_location))
        logger.debug(
            'Moving package %s from %s to new location %s',
            self, display_path(old_location), display_path(new_location),
        )
        shutil.move(old_location, new_location)
        self._temp_build_dir = new_location
        self._ideal_build_dir = None
        self.source_dir = new_location
        # Invalidate the cached egg-info path; it pointed into old_location.
        self._egg_info_path = None
|
|
|
|
|
|
|
|
@property
|
|
|
|
def name(self):
|
|
|
|
if self.req is None:
|
|
|
|
return None
|
2015-11-12 00:51:46 +01:00
|
|
|
return native_str(pkg_resources.safe_name(self.req.name))
|
2014-01-12 01:50:11 +01:00
|
|
|
|
2016-02-08 14:04:10 +01:00
|
|
|
@property
|
|
|
|
def setup_py_dir(self):
|
|
|
|
return os.path.join(
|
|
|
|
self.source_dir,
|
|
|
|
self.link and self.link.subdirectory_fragment or '')
|
|
|
|
|
2014-01-12 01:50:11 +01:00
|
|
|
    @property
    def setup_py(self):
        """Path to this requirement's setup.py.

        :raises InstallationError: if setuptools cannot be imported (it is
            required to build from a source distribution).
        """
        assert self.source_dir, "No source dir for %s" % self
        try:
            import setuptools  # noqa
        except ImportError:
            if get_installed_version('setuptools') is None:
                add_msg = "Please install setuptools."
            else:
                # setuptools is installed but failed to import; show why.
                add_msg = traceback.format_exc()
            # Setuptools is not available
            raise InstallationError(
                "Could not import setuptools which is required to "
                "install from a source distribution.\n%s" % add_msg
            )

        setup_py = os.path.join(self.setup_py_dir, 'setup.py')

        # Python2 __file__ should not be unicode
        if six.PY2 and isinstance(setup_py, six.text_type):
            setup_py = setup_py.encode(sys.getfilesystemencoding())

        return setup_py
|
|
|
|
|
2014-05-08 18:12:34 +02:00
|
|
|
    def run_egg_info(self):
        """Run ``setup.py egg_info`` in the source dir to generate metadata.

        Afterwards, for an unnamed requirement, derive ``self.req`` from the
        generated PKG-INFO and relocate the temporary build directory; for a
        named requirement, verify the metadata name matches and warn (then
        trust the metadata) if it does not.
        """
        assert self.source_dir
        if self.name:
            logger.debug(
                'Running setup.py (path:%s) egg_info for package %s',
                self.setup_py, self.name,
            )
        else:
            logger.debug(
                'Running setup.py (path:%s) egg_info for package from %s',
                self.setup_py, self.link,
            )

        with indent_log():
            # Run setup.py through the setuptools shim so plain-distutils
            # setup scripts also grow an egg_info command.
            script = SETUPTOOLS_SHIM % self.setup_py
            base_cmd = [sys.executable, '-c', script]
            if self.isolated:
                base_cmd += ["--no-user-cfg"]
            egg_info_cmd = base_cmd + ['egg_info']
            # We can't put the .egg-info files at the root, because then the
            # source code will be mistaken for an installed egg, causing
            # problems
            if self.editable:
                egg_base_option = []
            else:
                egg_info_dir = os.path.join(self.setup_py_dir, 'pip-egg-info')
                ensure_dir(egg_info_dir)
                egg_base_option = ['--egg-base', 'pip-egg-info']
            call_subprocess(
                egg_info_cmd + egg_base_option,
                cwd=self.setup_py_dir,
                show_stdout=False,
                command_level=logging.DEBUG,
                command_desc='python setup.py egg_info')

        if not self.req:
            # Pin to the metadata version: '==' for PEP 440 versions,
            # '===' (arbitrary equality) for legacy version strings.
            if isinstance(parse_version(self.pkg_info()["Version"]), Version):
                op = "=="
            else:
                op = "==="
            self.req = Requirement(
                "".join([
                    self.pkg_info()["Name"],
                    op,
                    self.pkg_info()["Version"],
                ])
            )
            self._correct_build_location()
        else:
            metadata_name = canonicalize_name(self.pkg_info()["Name"])
            if canonicalize_name(self.req.name) != metadata_name:
                logger.warning(
                    'Running setup.py (path:%s) egg_info for package %s '
                    'produced metadata for project name %s. Fix your '
                    '#egg=%s fragments.',
                    self.setup_py, self.name, metadata_name, self.name
                )
                self.req = Requirement(metadata_name)
|
2014-01-12 01:50:11 +01:00
|
|
|
|
|
|
|
def egg_info_data(self, filename):
|
|
|
|
if self.satisfied_by is not None:
|
|
|
|
if not self.satisfied_by.has_metadata(filename):
|
|
|
|
return None
|
|
|
|
return self.satisfied_by.get_metadata(filename)
|
|
|
|
assert self.source_dir
|
|
|
|
filename = self.egg_info_path(filename)
|
|
|
|
if not os.path.exists(filename):
|
|
|
|
return None
|
|
|
|
data = read_text_file(filename)
|
|
|
|
return data
|
|
|
|
|
|
|
|
    def egg_info_path(self, filename):
        """Return the absolute path of *filename* inside this requirement's
        ``.egg-info`` directory, locating (and caching) that directory on
        first use.
        """
        if self._egg_info_path is None:
            # Editable metadata lives somewhere inside the checkout; for
            # regular sdists run_egg_info wrote it to
            # <setup_py_dir>/pip-egg-info.
            if self.editable:
                base = self.source_dir
            else:
                base = os.path.join(self.setup_py_dir, 'pip-egg-info')
            filenames = os.listdir(base)
            if self.editable:
                # For editables, walk the whole tree collecting candidate
                # directories, pruning VCS metadata, virtualenv-looking dirs
                # and test directories along the way.
                filenames = []
                for root, dirs, files in os.walk(base):
                    for dir in vcs.dirnames:
                        if dir in dirs:
                            dirs.remove(dir)
                    # Iterate over a copy of ``dirs``, since mutating
                    # a list while iterating over it can cause trouble.
                    # (See https://github.com/pypa/pip/pull/462.)
                    for dir in list(dirs):
                        # Don't search in anything that looks like a virtualenv
                        # environment
                        if (
                                os.path.exists(
                                    os.path.join(root, dir, 'bin', 'python')
                                ) or
                                os.path.exists(
                                    os.path.join(
                                        root, dir, 'Scripts', 'Python.exe'
                                    )
                                )):
                            dirs.remove(dir)
                        # Also don't search through tests
                        elif dir == 'test' or dir == 'tests':
                            dirs.remove(dir)
                    filenames.extend([os.path.join(root, dir)
                                      for dir in dirs])
                filenames = [f for f in filenames if f.endswith('.egg-info')]

            if not filenames:
                raise InstallationError(
                    'No files/directories in %s (from %s)' % (base, filename)
                )
            # NOTE(review): this assert can never fire -- the raise above
            # already covers the empty case; it looks like a leftover.
            assert filenames, \
                "No files/directories in %s (from %s)" % (base, filename)

            # if we have more than one match, we pick the toplevel one. This
            # can easily be the case if there is a dist folder which contains
            # an extracted tarball for testing purposes.
            if len(filenames) > 1:
                filenames.sort(
                    key=lambda x: x.count(os.path.sep) +
                    (os.path.altsep and x.count(os.path.altsep) or 0)
                )
            self._egg_info_path = os.path.join(base, filenames[0])
        return os.path.join(self._egg_info_path, filename)
|
|
|
|
|
|
|
|
def pkg_info(self):
|
|
|
|
p = FeedParser()
|
|
|
|
data = self.egg_info_data('PKG-INFO')
|
|
|
|
if not data:
|
2014-08-31 01:52:28 +02:00
|
|
|
logger.warning(
|
|
|
|
'No PKG-INFO file found in %s',
|
|
|
|
display_path(self.egg_info_path('PKG-INFO')),
|
2014-01-27 15:07:10 +01:00
|
|
|
)
|
2014-01-12 01:50:11 +01:00
|
|
|
p.feed(data or '')
|
|
|
|
return p.close()
|
|
|
|
|
|
|
|
_requirements_section_re = re.compile(r'\[(.*?)\]')
|
|
|
|
|
|
|
|
@property
|
|
|
|
def installed_version(self):
|
2015-07-03 17:39:01 +02:00
|
|
|
return get_installed_version(self.name)
|
2014-01-12 01:50:11 +01:00
|
|
|
|
|
|
|
def assert_source_matches_version(self):
|
|
|
|
assert self.source_dir
|
2014-12-23 12:14:35 +01:00
|
|
|
version = self.pkg_info()['version']
|
2015-11-12 00:51:46 +01:00
|
|
|
if self.req.specifier and version not in self.req.specifier:
|
2014-08-31 01:52:28 +02:00
|
|
|
logger.warning(
|
|
|
|
'Requested %s, but installing version %s',
|
|
|
|
self,
|
|
|
|
self.installed_version,
|
2014-01-27 15:07:10 +01:00
|
|
|
)
|
2014-01-12 01:50:11 +01:00
|
|
|
else:
|
2014-01-27 15:07:10 +01:00
|
|
|
logger.debug(
|
2014-08-31 01:52:28 +02:00
|
|
|
'Source in %s has version %s, which satisfies requirement %s',
|
|
|
|
display_path(self.source_dir),
|
|
|
|
version,
|
|
|
|
self,
|
2014-01-27 15:07:10 +01:00
|
|
|
)
|
2014-01-12 01:50:11 +01:00
|
|
|
|
|
|
|
    def update_editable(self, obtain=True):
        """Refresh an editable requirement's checkout via its VCS backend.

        :param obtain: when True, obtain (clone/update) the repository;
            otherwise export a clean copy without VCS metadata.
        """
        if not self.link:
            logger.debug(
                "Cannot update repository at %s; repository location is "
                "unknown",
                self.source_dir,
            )
            return
        assert self.editable
        assert self.source_dir
        if self.link.scheme == 'file':
            # Static paths don't get updated
            return
        # Editable VCS URLs are of the form "<vcs>+<real-url>".
        assert '+' in self.link.url, "bad url: %r" % self.link.url
        if not self.update:
            return
        vc_type, url = self.link.url.split('+', 1)
        backend = vcs.get_backend(vc_type)
        if backend:
            vcs_backend = backend(self.link.url)
            if obtain:
                vcs_backend.obtain(self.source_dir)
            else:
                vcs_backend.export(self.source_dir)
        else:
            assert 0, (
                'Unexpected version control type (in %s): %s'
                % (self.link, vc_type))
|
2014-01-12 01:50:11 +01:00
|
|
|
|
|
|
|
    def uninstall(self, auto_confirm=False):
        """
        Uninstall the distribution currently satisfying this requirement.

        Prompts before removing or modifying files unless
        ``auto_confirm`` is True.

        Refuses to delete or modify files outside of ``sys.prefix`` -
        thus uninstallation within a virtual environment can only
        modify that virtual environment, even if the virtualenv is
        linked to global site-packages.

        """
        if not self.check_if_exists():
            raise UninstallationError(
                "Cannot uninstall requirement %s, not installed" % (self.name,)
            )
        dist = self.satisfied_by or self.conflicts_with

        # Refuse to touch distributions outside the current environment or
        # inside the standard library.
        dist_path = normalize_path(dist.location)
        if not dist_is_local(dist):
            logger.info(
                "Not uninstalling %s at %s, outside environment %s",
                dist.key,
                dist_path,
                sys.prefix,
            )
            self.nothing_to_uninstall = True
            return

        if dist_path in get_stdlib():
            logger.info(
                "Not uninstalling %s at %s, as it is in the standard library.",
                dist.key,
                dist_path,
            )
            self.nothing_to_uninstall = True
            return

        paths_to_remove = UninstallPathSet(dist)
        develop_egg_link = egg_link_path(dist)
        develop_egg_link_egg_info = '{0}.egg-info'.format(
            pkg_resources.to_filename(dist.project_name))
        egg_info_exists = dist.egg_info and os.path.exists(dist.egg_info)
        # Special case for distutils installed package
        distutils_egg_info = getattr(dist._provider, 'path', None)

        # Uninstall cases order do matter as in the case of 2 installs of the
        # same package, pip needs to uninstall the currently detected version
        if (egg_info_exists and dist.egg_info.endswith('.egg-info') and
                not dist.egg_info.endswith(develop_egg_link_egg_info)):
            # if dist.egg_info.endswith(develop_egg_link_egg_info), we
            # are in fact in the develop_egg_link case
            paths_to_remove.add(dist.egg_info)
            if dist.has_metadata('installed-files.txt'):
                # Entries are recorded relative to the egg-info directory.
                for installed_file in dist.get_metadata(
                        'installed-files.txt').splitlines():
                    path = os.path.normpath(
                        os.path.join(dist.egg_info, installed_file)
                    )
                    paths_to_remove.add(path)
            # FIXME: need a test for this elif block
            # occurs with --single-version-externally-managed/--record outside
            # of pip
            elif dist.has_metadata('top_level.txt'):
                if dist.has_metadata('namespace_packages.txt'):
                    namespaces = dist.get_metadata('namespace_packages.txt')
                else:
                    namespaces = []
                # Remove each top-level package/module, plus its compiled
                # variants, but leave namespace packages alone.
                for top_level_pkg in [
                        p for p
                        in dist.get_metadata('top_level.txt').splitlines()
                        if p and p not in namespaces]:
                    path = os.path.join(dist.location, top_level_pkg)
                    paths_to_remove.add(path)
                    paths_to_remove.add(path + '.py')
                    paths_to_remove.add(path + '.pyc')
                    paths_to_remove.add(path + '.pyo')

        elif distutils_egg_info:
            # Plain distutils install: only the egg-info file is known, so
            # the uninstall is inherently partial -- hence the deprecation.
            warnings.warn(
                "Uninstalling a distutils installed project ({0}) has been "
                "deprecated and will be removed in a future version. This is "
                "due to the fact that uninstalling a distutils project will "
                "only partially uninstall the project.".format(self.name),
                RemovedInPip10Warning,
            )
            paths_to_remove.add(distutils_egg_info)

        elif dist.location.endswith('.egg'):
            # package installed by easy_install
            # We cannot match on dist.egg_name because it can slightly vary
            # i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg
            paths_to_remove.add(dist.location)
            easy_install_egg = os.path.split(dist.location)[1]
            easy_install_pth = os.path.join(os.path.dirname(dist.location),
                                            'easy-install.pth')
            paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg)

        elif develop_egg_link:
            # develop egg
            with open(develop_egg_link, 'r') as fh:
                link_pointer = os.path.normcase(fh.readline().strip())
            assert (link_pointer == dist.location), (
                'Egg-link %s does not match installed location of %s '
                '(at %s)' % (link_pointer, self.name, dist.location)
            )
            paths_to_remove.add(develop_egg_link)
            easy_install_pth = os.path.join(os.path.dirname(develop_egg_link),
                                            'easy-install.pth')
            paths_to_remove.add_pth(easy_install_pth, dist.location)

        elif egg_info_exists and dist.egg_info.endswith('.dist-info'):
            # Wheel install: RECORD lists every installed file.
            for path in pip.wheel.uninstallation_paths(dist):
                paths_to_remove.add(path)

        else:
            logger.debug(
                'Not sure how to uninstall: %s - Check: %s',
                dist, dist.location)

        # find distutils scripts= scripts
        if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'):
            for script in dist.metadata_listdir('scripts'):
                if dist_in_usersite(dist):
                    bin_dir = bin_user
                else:
                    bin_dir = bin_py
                paths_to_remove.add(os.path.join(bin_dir, script))
                if WINDOWS:
                    paths_to_remove.add(os.path.join(bin_dir, script) + '.bat')

        # find console_scripts
        if dist.has_metadata('entry_points.txt'):
            if six.PY2:
                options = {}
            else:
                # Restrict delimiters so "=" inside values parses correctly
                # on Python 3.
                options = {"delimiters": ('=', )}
            config = configparser.SafeConfigParser(**options)
            config.readfp(
                FakeFile(dist.get_metadata_lines('entry_points.txt'))
            )
            if config.has_section('console_scripts'):
                for name, value in config.items('console_scripts'):
                    if dist_in_usersite(dist):
                        bin_dir = bin_user
                    else:
                        bin_dir = bin_py
                    paths_to_remove.add(os.path.join(bin_dir, name))
                    if WINDOWS:
                        # Script wrappers generated on Windows.
                        paths_to_remove.add(
                            os.path.join(bin_dir, name) + '.exe'
                        )
                        paths_to_remove.add(
                            os.path.join(bin_dir, name) + '.exe.manifest'
                        )
                        paths_to_remove.add(
                            os.path.join(bin_dir, name) + '-script.py'
                        )

        # Perform the removal (prompting unless auto_confirm) and keep the
        # path set around for rollback/commit.
        paths_to_remove.remove(auto_confirm)
        self.uninstalled = paths_to_remove
|
|
|
|
|
|
|
|
def rollback_uninstall(self):
|
|
|
|
if self.uninstalled:
|
|
|
|
self.uninstalled.rollback()
|
|
|
|
else:
|
2014-08-31 01:52:28 +02:00
|
|
|
logger.error(
|
2016-01-20 19:53:13 +01:00
|
|
|
"Can't rollback %s, nothing uninstalled.", self.name,
|
2014-08-31 01:52:28 +02:00
|
|
|
)
|
2014-01-12 01:50:11 +01:00
|
|
|
|
|
|
|
def commit_uninstall(self):
|
|
|
|
if self.uninstalled:
|
|
|
|
self.uninstalled.commit()
|
2016-01-20 21:26:06 +01:00
|
|
|
elif not self.nothing_to_uninstall:
|
2014-08-31 01:52:28 +02:00
|
|
|
logger.error(
|
2016-01-20 21:26:06 +01:00
|
|
|
"Can't commit %s, nothing uninstalled.", self.name,
|
2014-08-31 01:52:28 +02:00
|
|
|
)
|
2014-01-12 01:50:11 +01:00
|
|
|
|
|
|
|
    def archive(self, build_dir):
        """Zip this requirement's source tree into *build_dir* as
        ``<name>-<version>.zip``, prompting interactively if the archive
        already exists.
        """
        assert self.source_dir
        create_archive = True
        archive_name = '%s-%s.zip' % (self.name, self.pkg_info()["version"])
        archive_path = os.path.join(build_dir, archive_name)
        if os.path.exists(archive_path):
            response = ask_path_exists(
                'The file %s exists. (i)gnore, (w)ipe, (b)ackup ' %
                display_path(archive_path), ('i', 'w', 'b'))
            if response == 'i':
                create_archive = False
            elif response == 'w':
                logger.warning('Deleting %s', display_path(archive_path))
                os.remove(archive_path)
            elif response == 'b':
                dest_file = backup_dir(archive_path)
                logger.warning(
                    'Backing up %s to %s',
                    display_path(archive_path),
                    display_path(dest_file),
                )
                shutil.move(archive_path, dest_file)
        if create_archive:
            zip = zipfile.ZipFile(
                archive_path, 'w', zipfile.ZIP_DEFLATED,
                allowZip64=True
            )
            dir = os.path.normcase(os.path.abspath(self.setup_py_dir))
            for dirpath, dirnames, filenames in os.walk(dir):
                # pip's own scratch metadata does not belong in the archive.
                if 'pip-egg-info' in dirnames:
                    dirnames.remove('pip-egg-info')
                # Write explicit directory entries so empty dirs survive.
                for dirname in dirnames:
                    dirname = os.path.join(dirpath, dirname)
                    name = self._clean_zip_name(dirname, dir)
                    zipdir = zipfile.ZipInfo(self.name + '/' + name + '/')
                    zipdir.external_attr = 0x1ED << 16  # 0o755
                    zip.writestr(zipdir, '')
                for filename in filenames:
                    # Skip pip's delete-marker sentinel file.
                    if filename == PIP_DELETE_MARKER_FILENAME:
                        continue
                    filename = os.path.join(dirpath, filename)
                    name = self._clean_zip_name(filename, dir)
                    zip.write(filename, self.name + '/' + name)
            zip.close()
        logger.info('Saved %s', display_path(archive_path))
|
2014-01-12 01:50:11 +01:00
|
|
|
|
|
|
|
def _clean_zip_name(self, name, prefix):
|
2014-02-24 22:52:23 +01:00
|
|
|
assert name.startswith(prefix + os.path.sep), (
|
|
|
|
"name %r doesn't start with prefix %r" % (name, prefix)
|
|
|
|
)
|
|
|
|
name = name[len(prefix) + 1:]
|
2014-01-12 01:50:11 +01:00
|
|
|
name = name.replace(os.path.sep, '/')
|
|
|
|
return name
|
|
|
|
|
2014-01-14 18:36:47 +01:00
|
|
|
def match_markers(self):
|
|
|
|
if self.markers is not None:
|
2016-04-25 02:55:41 +02:00
|
|
|
return Marker(self.markers).evaluate()
|
2014-01-14 18:36:47 +01:00
|
|
|
else:
|
|
|
|
return True
|
|
|
|
|
2015-11-16 17:39:44 +01:00
|
|
|
def install(self, install_options, global_options=[], root=None,
|
|
|
|
prefix=None):
|
2014-01-12 01:50:11 +01:00
|
|
|
if self.editable:
|
2015-11-23 18:36:57 +01:00
|
|
|
self.install_editable(
|
|
|
|
install_options, global_options, prefix=prefix)
|
2014-01-12 01:50:11 +01:00
|
|
|
return
|
|
|
|
if self.is_wheel:
|
2014-02-19 00:31:12 +01:00
|
|
|
version = pip.wheel.wheel_version(self.source_dir)
|
2014-02-18 05:16:54 +01:00
|
|
|
pip.wheel.check_compatibility(version, self.name)
|
|
|
|
|
2015-11-16 17:39:44 +01:00
|
|
|
self.move_wheel_files(self.source_dir, root=root, prefix=prefix)
|
2014-01-12 01:50:11 +01:00
|
|
|
self.install_succeeded = True
|
|
|
|
return
|
|
|
|
|
2015-03-14 21:37:09 +01:00
|
|
|
# Extend the list of global and install options passed on to
|
|
|
|
# the setup.py call with the ones from the requirements file.
|
|
|
|
# Options specified in requirements file override those
|
|
|
|
# specified on the command line, since the last option given
|
|
|
|
# to setup.py is the one that is used.
|
2015-03-18 21:00:21 +01:00
|
|
|
global_options += self.options.get('global_options', [])
|
|
|
|
install_options += self.options.get('install_options', [])
|
2015-03-14 21:37:09 +01:00
|
|
|
|
2014-12-11 15:44:53 +01:00
|
|
|
if self.isolated:
|
|
|
|
global_options = list(global_options) + ["--no-user-cfg"]
|
|
|
|
|
2014-01-12 01:50:11 +01:00
|
|
|
temp_location = tempfile.mkdtemp('-record', 'pip-')
|
|
|
|
record_filename = os.path.join(temp_location, 'install-record.txt')
|
|
|
|
try:
|
2015-11-04 09:06:50 +01:00
|
|
|
install_args = [sys.executable, "-u"]
|
2014-01-12 01:50:11 +01:00
|
|
|
install_args.append('-c')
|
2015-11-24 22:37:11 +01:00
|
|
|
install_args.append(SETUPTOOLS_SHIM % self.setup_py)
|
2014-01-27 15:07:10 +01:00
|
|
|
install_args += list(global_options) + \
|
|
|
|
['install', '--record', record_filename]
|
2014-01-12 01:50:11 +01:00
|
|
|
|
|
|
|
if not self.as_egg:
|
|
|
|
install_args += ['--single-version-externally-managed']
|
|
|
|
|
|
|
|
if root is not None:
|
|
|
|
install_args += ['--root', root]
|
2015-11-16 17:39:44 +01:00
|
|
|
if prefix is not None:
|
|
|
|
install_args += ['--prefix', prefix]
|
2014-01-12 01:50:11 +01:00
|
|
|
|
|
|
|
if self.pycompile:
|
|
|
|
install_args += ["--compile"]
|
|
|
|
else:
|
|
|
|
install_args += ["--no-compile"]
|
|
|
|
|
|
|
|
if running_under_virtualenv():
|
2014-04-15 22:09:23 +02:00
|
|
|
py_ver_str = 'python' + sysconfig.get_python_version()
|
2014-01-12 01:50:11 +01:00
|
|
|
install_args += ['--install-headers',
|
|
|
|
os.path.join(sys.prefix, 'include', 'site',
|
2015-02-12 20:55:44 +01:00
|
|
|
py_ver_str, self.name)]
|
2015-11-04 09:06:50 +01:00
|
|
|
msg = 'Running setup.py install for %s' % (self.name,)
|
|
|
|
with open_spinner(msg) as spinner:
|
|
|
|
with indent_log():
|
|
|
|
call_subprocess(
|
|
|
|
install_args + install_options,
|
2016-02-08 14:04:10 +01:00
|
|
|
cwd=self.setup_py_dir,
|
2015-11-04 09:06:50 +01:00
|
|
|
show_stdout=False,
|
|
|
|
spinner=spinner,
|
|
|
|
)
|
2014-08-31 01:52:28 +02:00
|
|
|
|
2014-01-12 01:50:11 +01:00
|
|
|
if not os.path.exists(record_filename):
|
2014-08-31 01:52:28 +02:00
|
|
|
logger.debug('Record file %s not found', record_filename)
|
2014-01-12 01:50:11 +01:00
|
|
|
return
|
|
|
|
self.install_succeeded = True
|
|
|
|
if self.as_egg:
|
2014-01-27 15:07:10 +01:00
|
|
|
# there's no --always-unzip option we can pass to install
|
|
|
|
# command so we unable to save the installed-files.txt
|
2014-01-12 01:50:11 +01:00
|
|
|
return
|
|
|
|
|
|
|
|
def prepend_root(path):
|
|
|
|
if root is None or not os.path.isabs(path):
|
|
|
|
return path
|
|
|
|
else:
|
|
|
|
return change_root(root, path)
|
|
|
|
|
2014-10-02 23:45:37 +02:00
|
|
|
with open(record_filename) as f:
|
|
|
|
for line in f:
|
|
|
|
directory = os.path.dirname(line)
|
|
|
|
if directory.endswith('.egg-info'):
|
|
|
|
egg_info_dir = prepend_root(directory)
|
|
|
|
break
|
|
|
|
else:
|
|
|
|
logger.warning(
|
|
|
|
'Could not find .egg-info directory in install record'
|
|
|
|
' for %s',
|
|
|
|
self,
|
|
|
|
)
|
|
|
|
# FIXME: put the record somewhere
|
|
|
|
# FIXME: should this be an error?
|
|
|
|
return
|
2014-01-12 01:50:11 +01:00
|
|
|
new_lines = []
|
2014-10-02 23:45:37 +02:00
|
|
|
with open(record_filename) as f:
|
|
|
|
for line in f:
|
|
|
|
filename = line.strip()
|
|
|
|
if os.path.isdir(filename):
|
|
|
|
filename += os.path.sep
|
|
|
|
new_lines.append(
|
2015-09-01 14:16:12 +02:00
|
|
|
os.path.relpath(
|
2014-10-02 23:45:37 +02:00
|
|
|
prepend_root(filename), egg_info_dir)
|
|
|
|
)
|
|
|
|
inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt')
|
|
|
|
with open(inst_files_path, 'w') as f:
|
|
|
|
f.write('\n'.join(new_lines) + '\n')
|
2014-01-12 01:50:11 +01:00
|
|
|
finally:
|
|
|
|
if os.path.exists(record_filename):
|
|
|
|
os.remove(record_filename)
|
2015-02-04 17:46:42 +01:00
|
|
|
rmtree(temp_location)
|
2014-01-12 01:50:11 +01:00
|
|
|
|
2015-03-23 00:36:10 +01:00
|
|
|
def ensure_has_source_dir(self, parent_dir):
|
|
|
|
"""Ensure that a source_dir is set.
|
|
|
|
|
|
|
|
This will create a temporary build dir if the name of the requirement
|
|
|
|
isn't known yet.
|
|
|
|
|
|
|
|
:param parent_dir: The ideal pip parent_dir for the source_dir.
|
|
|
|
Generally src_dir for editables and build_dir for sdists.
|
|
|
|
:return: self.source_dir
|
|
|
|
"""
|
|
|
|
if self.source_dir is None:
|
|
|
|
self.source_dir = self.build_location(parent_dir)
|
|
|
|
return self.source_dir
|
|
|
|
|
2014-01-12 01:50:11 +01:00
|
|
|
def remove_temporary_source(self):
|
|
|
|
"""Remove the source files from this requirement, if they are marked
|
|
|
|
for deletion"""
|
2014-12-19 15:33:59 +01:00
|
|
|
if self.source_dir and os.path.exists(
|
|
|
|
os.path.join(self.source_dir, PIP_DELETE_MARKER_FILENAME)):
|
2014-08-31 01:52:28 +02:00
|
|
|
logger.debug('Removing source in %s', self.source_dir)
|
2014-12-19 15:33:59 +01:00
|
|
|
rmtree(self.source_dir)
|
|
|
|
self.source_dir = None
|
2014-01-12 01:50:11 +01:00
|
|
|
if self._temp_build_dir and os.path.exists(self._temp_build_dir):
|
|
|
|
rmtree(self._temp_build_dir)
|
|
|
|
self._temp_build_dir = None
|
|
|
|
|
2015-11-23 18:36:57 +01:00
|
|
|
    def install_editable(self, install_options,
                         global_options=(), prefix=None):
        """Install this requirement in editable mode by running
        ``setup.py develop --no-deps`` in a subprocess.
        """
        logger.info('Running setup.py develop for %s', self.name)

        if self.isolated:
            # Keep the user's ~/.pydistutils.cfg out of the build.
            global_options = list(global_options) + ["--no-user-cfg"]

        if prefix:
            prefix_param = ['--prefix={0}'.format(prefix)]
            install_options = list(install_options) + prefix_param

        with indent_log():
            # FIXME: should we do --install-headers here too?
            call_subprocess(
                [
                    sys.executable,
                    '-c',
                    SETUPTOOLS_SHIM % self.setup_py
                ] +
                list(global_options) +
                ['develop', '--no-deps'] +
                list(install_options),

                cwd=self.setup_py_dir,
                show_stdout=False)

        self.install_succeeded = True
|
|
|
|
|
|
|
|
    def check_if_exists(self):
        """Find an installed distribution that satisfies or conflicts
        with this requirement, and set self.satisfied_by or
        self.conflicts_with appropriately.

        :return: False when no matching distribution is installed; True
            otherwise (including the conflicting-version case).
        """
        if self.req is None:
            return False
        try:
            # get_distribution() will resolve the entire list of requirements
            # anyway, and we've already determined that we need the requirement
            # in question, so strip the marker so that we don't try to
            # evaluate it.
            no_marker = Requirement(str(self.req))
            no_marker.marker = None
            self.satisfied_by = pkg_resources.get_distribution(str(no_marker))
        except pkg_resources.DistributionNotFound:
            return False
        except pkg_resources.VersionConflict:
            # Something is installed, but at a version that does not satisfy
            # this requirement; record it so it can be replaced.
            existing_dist = pkg_resources.get_distribution(
                self.req.name
            )
            if self.use_user_site:
                if dist_in_usersite(existing_dist):
                    self.conflicts_with = existing_dist
                elif (running_under_virtualenv() and
                        dist_in_site_packages(existing_dist)):
                    # A user-site install can never take sys.path precedence
                    # over the virtualenv's site-packages, so refuse outright.
                    raise InstallationError(
                        "Will not install to the user site because it will "
                        "lack sys.path precedence to %s in %s" %
                        (existing_dist.project_name, existing_dist.location)
                    )
            else:
                self.conflicts_with = existing_dist
        return True
|
|
|
|
|
|
|
|
@property
|
|
|
|
def is_wheel(self):
|
2014-12-28 23:52:32 +01:00
|
|
|
return self.link and self.link.is_wheel
|
2014-01-12 01:50:11 +01:00
|
|
|
|
2015-11-16 17:39:44 +01:00
|
|
|
    def move_wheel_files(self, wheeldir, root=None, prefix=None):
        """Install this requirement's wheel contents from *wheeldir*.

        Delegates to the module-level ``move_wheel_files`` helper imported
        by this module (which this method intentionally shadows by name).
        """
        move_wheel_files(
            self.name, self.req, wheeldir,
            user=self.use_user_site,
            home=self.target_dir,
            root=root,
            prefix=prefix,
            pycompile=self.pycompile,
            isolated=self.isolated,
        )
|
|
|
|
|
2014-12-07 23:51:46 +01:00
|
|
|
def get_dist(self):
|
|
|
|
"""Return a pkg_resources.Distribution built from self.egg_info_path"""
|
2015-03-12 20:52:07 +01:00
|
|
|
egg_info = self.egg_info_path('').rstrip('/')
|
2014-12-07 23:51:46 +01:00
|
|
|
base_dir = os.path.dirname(egg_info)
|
|
|
|
metadata = pkg_resources.PathMetadata(base_dir, egg_info)
|
|
|
|
dist_name = os.path.splitext(os.path.basename(egg_info))[0]
|
|
|
|
return pkg_resources.Distribution(
|
|
|
|
os.path.dirname(egg_info),
|
|
|
|
project_name=dist_name,
|
|
|
|
metadata=metadata)
|
|
|
|
|
Add checks against requirements-file-dwelling hashes for most kinds of packages. Close #1175.
* Add --require-hashes option. This is handy in deployment scripts to force application authors to hash their requirements. It is also a convenient way to get pip to show computed hashes for a virgin, unhashed requirements file. Eventually, additions to `pip freeze` should fill a superset of this use case.
* In --require-hashes mode, at least one hash is required to match for each requirement.
* Option-based requirements (--sha256=...) turn on --require-hashes mode implicitly.
* Internet-derived URL-based hashes are "necessary but not sufficient": they do not satisfy --require-hashes mode when they match, but they are still used to guard against transmission errors.
* Other URL-based requirements (#md5=...) are treated just like flag-based ones, except they don't turn on --require-hashes.
* Complain informatively, with the most devastating errors first so you don't chase your tail all day only to run up against a brick wall at the end. This also means we don't complain that a hash is missing, only for the user to find, after fixing it, that we have no idea how to even compute a hash for that type of requirement.
* Complain about unpinned requirements when hash-checking mode is on, lest they cause the user surprise later.
* Complain about missing hashes.
* Complain about requirement types we don't know how to hash (like VCS ones and local dirs).
* Have InstallRequirement keep its original Link around (original_link) so we can differentiate between URL hashes from requirements files and ones downloaded from the (untrustworthy) internet.
* Remove test_download_hashes, which is obsolete. Similar coverage is provided in test_utils.TestHashes and the various hash cases in test_req.py.
2015-09-09 19:01:53 +02:00
|
|
|
@property
|
|
|
|
def has_hash_options(self):
|
|
|
|
"""Return whether any known-good hashes are specified as options.
|
|
|
|
|
|
|
|
These activate --require-hashes mode; hashes specified as part of a
|
|
|
|
URL do not.
|
|
|
|
|
|
|
|
"""
|
|
|
|
return bool(self.options.get('hashes', {}))
|
|
|
|
|
|
|
|
def hashes(self, trust_internet=True):
|
|
|
|
"""Return a hash-comparer that considers my option- and URL-based
|
|
|
|
hashes to be known-good.
|
|
|
|
|
2015-10-09 18:27:10 +02:00
|
|
|
Hashes in URLs--ones embedded in the requirements file, not ones
|
|
|
|
downloaded from an index server--are almost peers with ones from
|
|
|
|
flags. They satisfy --require-hashes (whether it was implicitly or
|
|
|
|
explicitly activated) but do not activate it. md5 and sha224 are not
|
|
|
|
allowed in flags, which should nudge people toward good algos. We
|
|
|
|
always OR all hashes together, even ones from URLs.
|
Add checks against requirements-file-dwelling hashes for most kinds of packages. Close #1175.
* Add --require-hashes option. This is handy in deployment scripts to force application authors to hash their requirements. It is also a convenient way to get pip to show computed hashes for a virgin, unhashed requirements file. Eventually, additions to `pip freeze` should fill a superset of this use case.
* In --require-hashes mode, at least one hash is required to match for each requirement.
* Option-based requirements (--sha256=...) turn on --require-hashes mode implicitly.
* Internet-derived URL-based hashes are "necessary but not sufficient": they do not satisfy --require-hashes mode when they match, but they are still used to guard against transmission errors.
* Other URL-based requirements (#md5=...) are treated just like flag-based ones, except they don't turn on --require-hashes.
* Complain informatively, with the most devastating errors first so you don't chase your tail all day only to run up against a brick wall at the end. This also means we don't complain that a hash is missing, only for the user to find, after fixing it, that we have no idea how to even compute a hash for that type of requirement.
* Complain about unpinned requirements when hash-checking mode is on, lest they cause the user surprise later.
* Complain about missing hashes.
* Complain about requirement types we don't know how to hash (like VCS ones and local dirs).
* Have InstallRequirement keep its original Link around (original_link) so we can differentiate between URL hashes from requirements files and ones downloaded from the (untrustworthy) internet.
* Remove test_download_hashes, which is obsolete. Similar coverage is provided in test_utils.TestHashes and the various hash cases in test_req.py.
2015-09-09 19:01:53 +02:00
|
|
|
|
|
|
|
:param trust_internet: Whether to trust URL-based (#md5=...) hashes
|
|
|
|
downloaded from the internet, as by populate_link()
|
|
|
|
|
|
|
|
"""
|
|
|
|
good_hashes = self.options.get('hashes', {}).copy()
|
|
|
|
link = self.link if trust_internet else self.original_link
|
|
|
|
if link and link.hash:
|
|
|
|
good_hashes.setdefault(link.hash_name, []).append(link.hash)
|
|
|
|
return Hashes(good_hashes)
|
|
|
|
|
2014-01-12 01:50:11 +01:00
|
|
|
|
|
|
|
def _strip_postfix(req):
|
|
|
|
"""
|
|
|
|
Strip req postfix ( -dev, 0.2, etc )
|
|
|
|
"""
|
2014-03-26 23:24:19 +01:00
|
|
|
# FIXME: use package_to_requirement?
|
2014-01-12 01:50:11 +01:00
|
|
|
match = re.search(r'^(.*?)(?:-dev|-\d.*)$', req)
|
|
|
|
if match:
|
|
|
|
# Strip off -dev, -0.2, etc.
|
|
|
|
req = match.group(1)
|
|
|
|
return req
|
|
|
|
|
2014-01-27 15:07:10 +01:00
|
|
|
|
2014-01-12 01:50:11 +01:00
|
|
|
def _build_req_from_url(url):
|
|
|
|
|
|
|
|
parts = [p for p in url.split('#', 1)[0].split('/') if p]
|
|
|
|
|
|
|
|
req = None
|
2016-03-04 12:04:43 +01:00
|
|
|
if len(parts) > 2 and parts[-2] in ('tags', 'branches', 'tag', 'branch'):
|
2014-01-12 01:50:11 +01:00
|
|
|
req = parts[-3]
|
2016-03-04 12:04:43 +01:00
|
|
|
elif len(parts) > 1 and parts[-1] == 'trunk':
|
2014-01-12 01:50:11 +01:00
|
|
|
req = parts[-2]
|
2016-02-19 20:15:03 +01:00
|
|
|
if req:
|
|
|
|
warnings.warn(
|
|
|
|
'Sniffing the requirement name from the url is deprecated and '
|
|
|
|
'will be removed in the future. Please specify an #egg segment '
|
|
|
|
'instead.', RemovedInPip9Warning,
|
|
|
|
stacklevel=2)
|
2014-01-12 01:50:11 +01:00
|
|
|
return req
|
|
|
|
|
2014-01-27 15:07:10 +01:00
|
|
|
|
2014-01-12 01:50:11 +01:00
|
|
|
def parse_editable(editable_req, default_vcs=None):
    """Parses an editable requirement into a 3-tuple of:
        - a requirement (package) name, or None if it could not be
          determined from an #egg fragment or the URL itself
        - an URL (possibly prefixed with a ``<vcs>+`` scheme)
        - extras (a set of extra names), or None

    Accepted requirements:
        svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir
        .[some_extra]

    :param editable_req: the raw value given to -e/--editable
    :param default_vcs: VCS scheme assumed when the URL carries no
        ``<vcs>+`` prefix (the --default-vcs option)
    :raises InstallationError: if the requirement is not an installable
        local directory, not a recognised VCS URL, or its name cannot
        be determined.
    """

    # NOTE(review): local import — presumably deferred to avoid an import
    # cycle with pip.index; confirm before hoisting to module level.
    from pip.index import Link

    url = editable_req
    extras = None

    # If a file path is specified with extras, strip off the extras.
    m = re.match(r'^(.+)(\[[^\]]+\])$', url)
    if m:
        url_no_extras = m.group(1)
        extras = m.group(2)
    else:
        url_no_extras = url

    if os.path.isdir(url_no_extras):
        if not os.path.exists(os.path.join(url_no_extras, 'setup.py')):
            raise InstallationError(
                "Directory %r is not installable. File 'setup.py' not found." %
                url_no_extras
            )
        # Treating it as code that has already been checked out
        url_no_extras = path_to_url(url_no_extras)

    if url_no_extras.lower().startswith('file:'):
        package_name = Link(url_no_extras).egg_fragment
        if extras:
            # Parse the extras via a throwaway requirement so they come
            # back as a normalized set of names.
            return (
                package_name,
                url_no_extras,
                Requirement("placeholder" + extras).extras,
            )
        else:
            return package_name, url_no_extras, None

    # Prefix a bare VCS URL (e.g. "git:...") with its scheme name
    # ("git+git:...") so the backend lookup below recognises it.
    for version_control in vcs:
        if url.lower().startswith('%s:' % version_control):
            url = '%s+%s' % (version_control, url)
            break

    if '+' not in url:
        if default_vcs:
            url = default_vcs + '+' + url
        else:
            raise InstallationError(
                '%s should either be a path to a local project or a VCS url '
                'beginning with svn+, git+, hg+, or bzr+' %
                editable_req
            )

    vc_type = url.split('+', 1)[0].lower()

    if not vcs.get_backend(vc_type):
        error_message = 'For --editable=%s only ' % editable_req + \
            ', '.join([backend.name + '+URL' for backend in vcs.backends]) + \
            ' is currently supported'
        raise InstallationError(error_message)

    # Prefer an explicit #egg fragment; fall back to the (deprecated)
    # name-sniffing from the URL path.
    package_name = Link(url).egg_fragment
    if not package_name:
        package_name = _build_req_from_url(editable_req)
    if not package_name:
        raise InstallationError(
            '--editable=%s is not the right format; it must have '
            '#egg=Package' % editable_req
        )
    return _strip_postfix(package_name), url, None
|