2017-10-06 21:51:42 +02:00
|
|
|
import os
|
2010-02-24 11:24:55 +01:00
|
|
|
import textwrap
|
2013-08-18 11:59:44 +02:00
|
|
|
|
|
|
|
import pytest
|
2017-06-13 14:17:00 +02:00
|
|
|
|
2017-05-16 12:16:30 +02:00
|
|
|
from tests.lib import (
|
2017-10-06 21:51:42 +02:00
|
|
|
_create_test_package_with_subdirectory, path_to_url, pyversion,
|
2017-11-21 08:50:32 +01:00
|
|
|
requirements_file,
|
2017-05-16 12:16:30 +02:00
|
|
|
)
|
2013-05-27 19:46:31 +02:00
|
|
|
from tests.lib.local_repos import local_checkout
|
2010-06-03 04:25:26 +02:00
|
|
|
|
2011-05-04 09:44:02 +02:00
|
|
|
|
2015-01-15 00:53:15 +01:00
|
|
|
@pytest.mark.network
def test_requirements_file(script):
    """
    Test installing from a requirements file.

    """
    other_lib_name, other_lib_version = 'anyjson', '0.3'
    # The file exercises both an exact pin (==) and an upper bound (<=),
    # plus a comment line, to cover basic requirements-file parsing.
    script.scratch_path.join("initools-req.txt").write(textwrap.dedent("""\
        INITools==0.2
        # and something else to test out:
        %s<=%s
        """ % (other_lib_name, other_lib_version)))
    result = script.pip(
        'install', '-r', script.scratch_path / 'initools-req.txt'
    )
    # Both the .egg-info metadata and the package directory must appear in
    # site-packages for each installed requirement.
    assert (
        script.site_packages / 'INITools-0.2-py%s.egg-info' %
        pyversion in result.files_created
    )
    assert script.site_packages / 'initools' in result.files_created
    assert result.files_created[script.site_packages / other_lib_name].dir
    fn = '%s-%s-py%s.egg-info' % (other_lib_name, other_lib_version, pyversion)
    assert result.files_created[script.site_packages / fn].dir
|
2010-02-24 11:24:55 +01:00
|
|
|
|
2013-03-18 17:43:59 +01:00
|
|
|
|
2013-08-22 06:40:46 +02:00
|
|
|
def test_schema_check_in_requirements_file(script):
    """
    Test installing from a requirements file with an invalid vcs schema..

    """
    # A bare git:// URL (no vcs+ prefix) is not a valid requirement line;
    # the pip invocation must fail, which the test harness surfaces as an
    # AssertionError.
    bad_requirement = (
        "git://github.com/alex/django-fixture-generator.git"
        "#egg=fixture_generator"
    )
    script.scratch_path.join("file-egg-req.txt").write(
        "\n%s\n" % bad_requirement
    )

    with pytest.raises(AssertionError):
        script.pip(
            "install", "-vvv", "-r", script.scratch_path / "file-egg-req.txt"
        )
|
2011-05-04 09:44:02 +02:00
|
|
|
|
2012-05-14 05:13:50 +02:00
|
|
|
|
2013-08-23 13:12:37 +02:00
|
|
|
def test_relative_requirements_file(script, data):
    """
    Test installing from a requirements file with a relative path. For path
    URLs, use an egg= definition.

    """
    # Expected artifacts of a regular (egg_info_file, package_folder) and an
    # editable (egg_link_file) install of FSPkg.
    egg_info_file = (
        script.site_packages / 'FSPkg-0.1.dev0-py%s.egg-info' % pyversion
    )
    egg_link_file = (
        script.site_packages / 'FSPkg.egg-link'
    )
    package_folder = script.site_packages / 'fspkg'

    # Compute relative install path to FSPkg from scratch path.
    full_rel_path = data.packages.join('FSPkg') - script.scratch_path
    full_rel_url = 'file:' + full_rel_path + '#egg=FSPkg'
    embedded_rel_path = script.scratch_path.join(full_rel_path)

    # For each relative path, install as either editable or not using either
    # URLs with egg links or not.
    for req_path in (full_rel_path, full_rel_url, embedded_rel_path):
        # Normalize path separators so the requirement line is valid on
        # Windows as well.
        req_path = req_path.replace(os.path.sep, '/')
        # Regular install.
        with requirements_file(req_path + '\n',
                               script.scratch_path) as reqs_file:
            result = script.pip('install', '-vvv', '-r', reqs_file.name,
                                cwd=script.scratch_path)
        assert egg_info_file in result.files_created, str(result)
        assert package_folder in result.files_created, str(result)
        # Uninstall between iterations so each install starts clean.
        script.pip('uninstall', '-y', 'fspkg')

        # Editable install.
        with requirements_file('-e ' + req_path + '\n',
                               script.scratch_path) as reqs_file:
            result = script.pip('install', '-vvv', '-r', reqs_file.name,
                                cwd=script.scratch_path)
        assert egg_link_file in result.files_created, str(result)
        script.pip('uninstall', '-y', 'fspkg')
|
2010-06-03 04:25:26 +02:00
|
|
|
|
2011-05-04 09:44:02 +02:00
|
|
|
|
2015-01-15 00:53:15 +01:00
|
|
|
@pytest.mark.network
@pytest.mark.svn
def test_multiple_requirements_files(script, tmpdir):
    """
    Test installing from multiple nested requirements files.

    """
    other_lib_name, other_lib_version = 'anyjson', '0.3'
    # The top-level file contains an editable svn requirement plus a '-r'
    # include of a second requirements file generated below.
    script.scratch_path.join("initools-req.txt").write(
        textwrap.dedent("""
            -e %s@10#egg=INITools
            -r %s-req.txt
        """) %
        (
            local_checkout(
                'svn+http://svn.colorstudy.com/INITools/trunk',
                tmpdir.join("cache"),
            ),
            other_lib_name
        ),
    )
    script.scratch_path.join("%s-req.txt" % other_lib_name).write(
        "%s<=%s" % (other_lib_name, other_lib_version)
    )
    result = script.pip(
        'install', '-r', script.scratch_path / 'initools-req.txt'
    )
    # The nested requirement installs into site-packages...
    assert result.files_created[script.site_packages / other_lib_name].dir
    fn = '%s-%s-py%s.egg-info' % (other_lib_name, other_lib_version, pyversion)
    assert result.files_created[script.site_packages / fn].dir
    # ...while the editable vcs requirement checks out under venv/src.
    assert script.venv / 'src' / 'initools' in result.files_created
|
2010-02-24 11:24:55 +01:00
|
|
|
|
2011-01-04 06:05:10 +01:00
|
|
|
|
2015-06-18 08:58:20 +02:00
|
|
|
def test_package_in_constraints_and_dependencies(script, data):
    """A package that is both constrained and a dependency gets installed."""
    # TopoRequires is a dependency of TopoRequires2 and is also pinned in
    # the constraints file; the pin must not block it from installing.
    script.scratch_path.join("constraints.txt").write(
        "TopoRequires2==0.0.1\nTopoRequires==0.0.1"
    )
    result = script.pip(
        'install', '--no-index', '-f', data.find_links,
        '-c', script.scratch_path / 'constraints.txt', 'TopoRequires2',
    )
    assert 'installed TopoRequires-0.0.1' in result.stdout
|
|
|
|
|
|
|
|
|
2015-06-02 05:39:10 +02:00
|
|
|
def test_multiple_constraints_files(script, data):
    """Constraints files may recursively include other constraints files."""
    # outer.txt only includes inner.txt; the actual pin lives in inner.txt.
    script.scratch_path.join("outer.txt").write("-c inner.txt")
    script.scratch_path.join("inner.txt").write("Upper==1.0")
    result = script.pip(
        'install', '--no-index', '-f', data.find_links,
        '-c', script.scratch_path / 'outer.txt', 'Upper',
    )
    assert 'installed Upper-1.0' in result.stdout
|
|
|
|
|
|
|
|
|
2013-08-23 13:12:37 +02:00
|
|
|
def test_respect_order_in_requirements_file(script, data):
    # The listed order (parent, child, simple) must be the order in which
    # pip collects the requirements.
    script.scratch_path.join("frameworks-req.txt").write(textwrap.dedent("""\
        parent
        child
        simple
        """))

    result = script.pip(
        'install', '--no-index', '-f', data.find_links, '-r',
        script.scratch_path / 'frameworks-req.txt'
    )

    # One 'Collecting <name>' line is logged per requirement, in order.
    downloaded = [line for line in result.stdout.split('\n')
                  if 'Collecting' in line]

    assert 'parent' in downloaded[0], (
        'First download should be "parent" but was "%s"' % downloaded[0]
    )
    assert 'child' in downloaded[1], (
        'Second download should be "child" but was "%s"' % downloaded[1]
    )
    assert 'simple' in downloaded[2], (
        'Third download should be "simple" but was "%s"' % downloaded[2]
    )
|
2011-01-04 06:05:10 +01:00
|
|
|
|
|
|
|
|
2014-08-01 22:20:23 +02:00
|
|
|
def test_install_local_editable_with_extras(script, data):
    """Editable install of a local package with an extra pulls the extra in."""
    pkg_dir = data.packages.join("LocalExtras")
    res = script.pip(
        'install', '-e', pkg_dir + '[bar]', '--process-dependency-links',
        expect_error=False,
        expect_stderr=True,
    )
    # An editable install registers via easy-install.pth and an egg-link.
    assert script.site_packages / 'easy-install.pth' in res.files_updated, (
        str(res)
    )
    egg_link = script.site_packages / 'LocalExtras.egg-link'
    assert egg_link in res.files_created, str(res)
    # The [bar] extra's dependency 'simple' must be installed too.
    assert script.site_packages / 'simple' in res.files_created, str(res)
|
|
|
|
|
|
|
|
|
2016-06-10 21:27:07 +02:00
|
|
|
def test_install_collected_dependencies_first(script):
    """Dependencies are installed before the package that requires them."""
    result = script.pip_install_local('toporequires2')
    # On the "Installing collected packages: ..." line, the requested
    # package must be listed last, after its dependencies.
    install_lines = [
        line for line in result.stdout.split('\n') if 'Installing' in line
    ]
    assert install_lines[0].endswith('toporequires2')
|
2014-10-02 09:17:27 +02:00
|
|
|
|
|
|
|
|
2015-01-15 00:53:15 +01:00
|
|
|
@pytest.mark.network
def test_install_local_editable_with_subdirectory(script):
    """Editable install from a VCS URL carrying a &subdirectory= fragment."""
    repo_path = _create_test_package_with_subdirectory(script,
                                                       'version_subdir')
    vcs_url = 'git+%s' % path_to_url(repo_path)
    result = script.pip(
        'install', '-e',
        '%s#egg=version_subpkg&subdirectory=version_subdir' % (vcs_url,)
    )

    result.assert_installed('version-subpkg', sub_dir='version_subdir')
|
2015-04-09 06:13:54 +02:00
|
|
|
|
2015-04-18 21:48:25 +02:00
|
|
|
|
2016-02-08 14:04:10 +01:00
|
|
|
@pytest.mark.network
def test_install_local_with_subdirectory(script):
    """Regular install from a VCS URL carrying a &subdirectory= fragment."""
    repo_path = _create_test_package_with_subdirectory(script,
                                                       'version_subdir')
    vcs_url = 'git+' + path_to_url(repo_path)
    result = script.pip(
        'install',
        '%s#egg=version_subpkg&subdirectory=version_subdir' % (vcs_url,)
    )

    result.assert_installed('version_subpkg.py', editable=False)
|
|
|
|
|
|
|
|
|
2017-05-14 00:23:17 +02:00
|
|
|
def test_wheel_user_with_prefix_in_pydistutils_cfg(
        script, data, with_wheel):
    # A 'prefix' set in the user's pydistutils.cfg must not prevent a
    # --user wheel install from working.
    if os.name == 'posix':
        user_filename = ".pydistutils.cfg"
    else:
        user_filename = "pydistutils.cfg"
    user_cfg = os.path.join(os.path.expanduser('~'), user_filename)
    script.scratch_path.join("bin").mkdir()
    # NOTE(review): this writes into the (test-sandboxed) home directory;
    # presumably the harness isolates $HOME -- confirm against tests.lib.
    with open(user_cfg, "w") as cfg:
        cfg.write(textwrap.dedent("""
            [install]
            prefix=%s""" % script.scratch_path))

    result = script.pip(
        'install', '--user', '--no-index',
        '-f', data.find_links,
        'requiresupper')
    # Check that we are really installing a wheel
    assert 'Running setup.py install for requiresupper' not in result.stdout
    assert 'installed requiresupper' in result.stdout
|
|
|
|
|
|
|
|
|
2015-04-09 06:13:54 +02:00
|
|
|
def test_install_option_in_requirements_file(script, data, virtualenv):
    """
    Test --install-option in requirements file overrides same option in cli
    """

    # Two candidate --home targets; the per-requirement option points at
    # home1, the command-line option at home2.
    script.scratch_path.join("home1").mkdir()
    script.scratch_path.join("home2").mkdir()

    script.scratch_path.join("reqs.txt").write(
        textwrap.dedent(
            """simple --install-option='--home=%s'"""
            % script.scratch_path.join("home1")))

    result = script.pip(
        'install', '--no-index', '-f', data.find_links, '-r',
        script.scratch_path / 'reqs.txt',
        '--install-option=--home=%s' % script.scratch_path.join("home2"),
        expect_stderr=True)

    # The package must land under home1: the requirements-file option wins.
    package_dir = script.scratch / 'home1' / 'lib' / 'python' / 'simple'
    assert package_dir in result.files_created
|
2015-06-02 05:39:10 +02:00
|
|
|
|
|
|
|
|
|
|
|
def test_constraints_not_installed_by_default(script, data):
    """A constraints entry alone must not trigger installation of it."""
    script.scratch_path.join("c.txt").write("requiresupper")
    result = script.pip(
        'install', '--no-index', '-f', data.find_links,
        '-c', script.scratch_path / 'c.txt', 'Upper',
    )
    # Only 'Upper' was requested; the constrained name stays uninstalled.
    assert 'requiresupper' not in result.stdout
|
|
|
|
|
|
|
|
|
|
|
|
def test_constraints_only_causes_error(script, data):
    """Running pip install with only a constraints file is an error."""
    script.scratch_path.join("c.txt").write("requiresupper")
    result = script.pip(
        'install', '--no-index', '-f', data.find_links,
        '-c', script.scratch_path / 'c.txt', expect_error=True,
    )
    assert 'installed requiresupper' not in result.stdout
|
2015-06-29 07:40:57 +02:00
|
|
|
|
|
|
|
|
|
|
|
def test_constraints_local_editable_install_causes_error(script, data):
    """An editable local install conflicting with a pin must fail."""
    # Pin singlemodule to a version the local source tree cannot satisfy.
    script.scratch_path.join("constraints.txt").write("singlemodule==0.0.0")
    source_dir = data.src.join("singlemodule")
    result = script.pip(
        'install', '--no-index', '-f', data.find_links,
        '-c', script.scratch_path / 'constraints.txt',
        '-e', source_dir, expect_error=True)
    assert 'Could not satisfy constraints for' in result.stderr
|
|
|
|
|
|
|
|
|
2018-01-28 23:44:38 +01:00
|
|
|
def test_constraints_local_editable_install_pep518(script, data):
    """Editable install of a PEP 518 project succeeds without an index."""
    project_dir = data.src.join("pep518-3.0")

    # Seed the local package store with the build requirements so the
    # isolated build environment can be created offline.
    script.pip('download', 'setuptools', 'wheel', '-d', data.packages)
    script.pip(
        'install', '--no-index', '-f', data.find_links, '-e', project_dir)
|
|
|
|
|
|
|
|
|
2015-06-29 07:40:57 +02:00
|
|
|
def test_constraints_local_install_causes_error(script, data):
    """A non-editable local install conflicting with a pin must fail."""
    # Pin singlemodule to a version the local source tree cannot satisfy.
    script.scratch_path.join("constraints.txt").write("singlemodule==0.0.0")
    source_dir = data.src.join("singlemodule")
    result = script.pip(
        'install', '--no-index', '-f', data.find_links,
        '-c', script.scratch_path / 'constraints.txt',
        source_dir, expect_error=True)
    assert 'Could not satisfy constraints for' in result.stderr
|
|
|
|
|
|
|
|
|
|
|
|
def test_constraints_constrain_to_local_editable(script, data):
    """A '-e <url>' constraint redirects a name to an editable local tree."""
    source_dir = data.src.join("singlemodule")
    constraint_line = "-e %s#egg=singlemodule" % path_to_url(source_dir)
    script.scratch_path.join("constraints.txt").write(constraint_line)
    result = script.pip(
        'install', '--no-index', '-f', data.find_links,
        '-c', script.scratch_path / 'constraints.txt', 'singlemodule')
    # 'develop' (rather than 'install') shows the editable path was taken.
    assert 'Running setup.py develop for singlemodule' in result.stdout
|
|
|
|
|
|
|
|
|
|
|
|
def test_constraints_constrain_to_local(script, data):
    """A URL constraint redirects a requested name to a local source tree."""
    source_dir = data.src.join("singlemodule")
    constraint_line = "%s#egg=singlemodule" % path_to_url(source_dir)
    script.scratch_path.join("constraints.txt").write(constraint_line)
    result = script.pip(
        'install', '--no-index', '-f', data.find_links,
        '-c', script.scratch_path / 'constraints.txt', 'singlemodule')
    assert 'Running setup.py install for singlemodule' in result.stdout
|
|
|
|
|
|
|
|
|
|
|
|
def test_constrained_to_url_install_same_url(script, data):
    """Installing the very URL that a constraint pins must succeed."""
    source_dir = data.src.join("singlemodule")
    script.scratch_path.join("constraints.txt").write(
        path_to_url(source_dir) + "#egg=singlemodule"
    )
    result = script.pip(
        'install', '--no-index', '-f', data.find_links,
        '-c', script.scratch_path / 'constraints.txt', source_dir)
    assert ('Running setup.py install for singlemodule'
            in result.stdout), str(result)
|
Fix false hash mismatches when installing a package that has a cached wheel.
This would occur when, for example, installing from a requirements file that references a certain hashed sdist, a common situation.
As of pip 7, pip always tries to build a wheel for each requirement (if one wasn't provided directly) and installs from that. The way this was implemented, InstallRequirement.link pointed to the cached wheel, which obviously had a different hash than the index-sourced archive, so spurious mismatch errors would result.
Now we no longer read from the wheel cache in hash-checking mode.
Make populate_link(), rather than the `link` setter, responsible for mapping InstallRequirement.link to a cached wheel. populate_link() isn't called until until prepare_files(). At that point, when we've examined all InstallRequirements and their potential --hash options, we know whether we should be requiring hashes and thus whether to use the wheel cache at all.
The only place that sets InstallRequirement.link other than InstallRequirement itself is pip.wheel, which does so long after hashes have been checked, when it's unpacking the wheel it just built, so it won't cause spurious hash mismatches.
2015-10-16 21:58:59 +02:00
|
|
|
|
|
|
|
|
2017-05-14 17:41:03 +02:00
|
|
|
def test_double_install_spurious_hash_mismatch(
        script, tmpdir, data, with_wheel):
    """Make sure installing the same hashed sdist twice doesn't throw hash
    mismatch errors.

    Really, this is a test that we disable reads from the wheel cache in
    hash-checking mode. Locally, implicitly built wheels of sdists obviously
    have different hashes from the original archives. Comparing against those
    causes spurious mismatch errors.

    """
    # Install wheel package, otherwise, it won't try to build wheels.
    with requirements_file('simple==1.0 --hash=sha256:393043e672415891885c9a2a'
                           '0929b1af95fb866d6ca016b42d2e6ce53619b653',
                           tmpdir) as reqs_file:
        # Install a package (and build its wheel):
        result = script.pip_install_local(
            '--find-links', data.find_links,
            '-r', reqs_file.abspath, expect_error=False)
        assert 'Successfully installed simple-1.0' in str(result)

        # Uninstall it:
        script.pip('uninstall', '-y', 'simple', expect_error=False)

        # Then install it again. We should not hit a hash mismatch, and the
        # package should install happily.
        result = script.pip_install_local(
            '--find-links', data.find_links,
            '-r', reqs_file.abspath, expect_error=False)
        assert 'Successfully installed simple-1.0' in str(result)
|
2015-08-24 07:03:01 +02:00
|
|
|
|
|
|
|
|
|
|
|
def test_install_with_extras_from_constraints(script, data):
    """Extras named in a constraints file are honoured on install."""
    pkg_dir = data.packages.join("LocalExtras")
    script.scratch_path.join("constraints.txt").write(
        "%s#egg=LocalExtras[bar]" % path_to_url(pkg_dir)
    )
    result = script.pip_install_local(
        '-c', script.scratch_path / 'constraints.txt', 'LocalExtras')
    # The [bar] extra requires 'simple', so it must have been installed.
    assert script.site_packages / 'simple' in result.files_created
|
|
|
|
|
|
|
|
|
|
|
|
def test_install_with_extras_from_install(script, data):
    """Extras requested on the command line combine with a URL constraint."""
    pkg_dir = data.packages.join("LocalExtras")
    script.scratch_path.join("constraints.txt").write(
        "%s#egg=LocalExtras" % path_to_url(pkg_dir)
    )
    result = script.pip_install_local(
        '-c', script.scratch_path / 'constraints.txt', 'LocalExtras[baz]')
    # The [baz] extra requires 'singlemodule'.
    assert script.site_packages / 'singlemodule.py' in result.files_created
|
|
|
|
|
|
|
|
|
|
|
|
def test_install_with_extras_joined(script, data):
    """Extras from the constraint and from the install request are unioned."""
    pkg_dir = data.packages.join("LocalExtras")
    script.scratch_path.join("constraints.txt").write(
        "%s#egg=LocalExtras[bar]" % path_to_url(pkg_dir)
    )
    result = script.pip_install_local(
        '-c', script.scratch_path / 'constraints.txt', 'LocalExtras[baz]'
    )
    # [bar] requires 'simple'; [baz] requires 'singlemodule'.
    assert script.site_packages / 'simple' in result.files_created
    assert script.site_packages / 'singlemodule.py' in result.files_created
|
|
|
|
|
|
|
|
|
|
|
|
def test_install_with_extras_editable_joined(script, data):
    """Extras union also applies when the constraint is an editable URL."""
    pkg_dir = data.packages.join("LocalExtras")
    script.scratch_path.join("constraints.txt").write(
        "-e %s#egg=LocalExtras[bar]" % path_to_url(pkg_dir)
    )
    result = script.pip_install_local(
        '-c', script.scratch_path / 'constraints.txt', 'LocalExtras[baz]')
    # [bar] requires 'simple'; [baz] requires 'singlemodule'.
    assert script.site_packages / 'simple' in result.files_created
    assert script.site_packages / 'singlemodule.py' in result.files_created
|
|
|
|
|
|
|
|
|
|
|
|
def test_install_distribution_full_union(script, data):
    """Requesting one package repeatedly with different extras unions them."""
    pkg_dir = data.packages.join("LocalExtras")
    result = script.pip_install_local(
        pkg_dir, pkg_dir + "[bar]", pkg_dir + "[baz]")
    assert 'Running setup.py install for LocalExtras' in result.stdout
    # Dependencies of both extras must be present.
    assert script.site_packages / 'simple' in result.files_created
    assert script.site_packages / 'singlemodule.py' in result.files_created
|
|
|
|
|
|
|
|
|
|
|
|
def test_install_distribution_duplicate_extras(script, data):
    # Installing the same requirement (with extras) twice in one command is
    # rejected as a double requirement.
    to_install = data.packages.join("LocalExtras")
    package_name = to_install + "[bar]"
    with pytest.raises(AssertionError):
        result = script.pip_install_local(package_name, package_name)
        # NOTE(review): this assert is unreachable when pip_install_local
        # raises the AssertionError that pytest.raises expects -- confirm
        # whether the stderr check was meant to run at all.
        assert 'Double requirement given: %s' % package_name in result.stderr
|
|
|
|
|
|
|
|
|
|
|
|
def test_install_distribution_union_with_constraints(script, data):
    """Extras in a constraints entry union with extras on the request."""
    pkg_dir = data.packages.join("LocalExtras")
    script.scratch_path.join("constraints.txt").write("%s[bar]" % pkg_dir)
    result = script.pip_install_local(
        '-c', script.scratch_path / 'constraints.txt', pkg_dir + '[baz]')
    assert 'Running setup.py install for LocalExtras' in result.stdout
    # [baz] requires 'singlemodule'.
    assert script.site_packages / 'singlemodule.py' in result.files_created
|
|
|
|
|
|
|
|
|
|
|
|
def test_install_distribution_union_with_versions(script, data):
    """Requesting extras from two versions installs the first version."""
    v001_dir = data.packages.join("LocalExtras")
    v002_dir = data.packages.join("LocalExtras-0.0.2")
    result = script.pip_install_local(
        v001_dir + "[bar]", v002_dir + "[baz]")
    expected = ("Successfully installed LocalExtras-0.0.1 simple-3.0 "
                "singlemodule-0.0.1")
    assert expected in result.stdout
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.xfail
def test_install_distribution_union_conflicting_extras(script, data):
    """Conflicting pins across extras should be detected (needs a resolver).

    LocalExtras requires simple==1.0 while LocalExtras[bar] requires
    simple==2.0; without a resolver, pip does not detect the conflict
    between simple==1.0 and simple==2.0, hence the xfail marker. Once a
    resolver is added, this conflict should be detected.
    """
    pkg_dir = data.packages.join("LocalExtras-0.0.2")
    result = script.pip_install_local(pkg_dir, pkg_dir + "[bar]",
                                      expect_error=True)
    assert 'installed' not in result.stdout
    assert "Conflict" in result.stderr
|
2016-03-10 03:55:20 +01:00
|
|
|
|
|
|
|
|
2017-10-02 19:33:52 +02:00
|
|
|
def test_install_unsupported_wheel_link_with_marker(script):
    """A wheel link guarded by a non-matching marker is skipped, not fatal."""
    wheel_url = 'https://github.com/a/b/c/asdf-1.5.2-cp27-none-xyz.whl'
    marker = 'sys_platform == "xyz"'
    script.scratch_path.join("with-marker.txt").write(
        textwrap.dedent("""\
            %s; %s
        """) % (wheel_url, marker)
    )
    result = script.pip(
        'install', '-r', script.scratch_path / 'with-marker.txt',
        expect_error=False,
    )

    # pip must report the skip and install nothing at all.
    assert ("Ignoring asdf: markers 'sys_platform == \"xyz\"' don't match "
            "your environment") in result.stdout
    assert len(result.files_created) == 0
|
|
|
|
|
|
|
|
|
|
|
|
def test_install_unsupported_wheel_file(script, data):
    """Installing a local wheel with an unsupported tag must fail cleanly."""
    # Trying to install a local wheel with an incompatible version/type
    # should fail.
    bad_wheel = data.packages.join("simple.dist-0.1-py1-none-invalid.whl")
    script.scratch_path.join("wheel-file.txt").write(textwrap.dedent("""\
        %s
        """ % bad_wheel))
    result = script.pip(
        'install', '-r', script.scratch_path / 'wheel-file.txt',
        expect_error=True,
        expect_stderr=True,
    )
    assert ("simple.dist-0.1-py1-none-invalid.whl is not a supported " +
            "wheel on this platform" in result.stderr)
    assert len(result.files_created) == 0
|
2018-03-20 01:30:09 +01:00
|
|
|
|
|
|
|
|
2018-03-20 22:32:22 +01:00
|
|
|
def test_install_options_local_to_package(script, data):
    """Make sure --install-options does not leak across packages.

    A requirements.txt file can have per-package --install-options; these
    should be isolated to just the package instead of leaking to subsequent
    packages. This needs to be a functional test because the bug was around
    cross-contamination at install time.
    """
    home_simple = script.scratch_path.join("for-simple")
    test_simple = script.scratch.join("for-simple")
    home_simple.mkdir()
    # 'simple' carries a per-requirement --install-option; INITools, listed
    # after it, must NOT inherit that option.
    reqs_file = script.scratch_path.join("reqs.txt")
    reqs_file.write(
        textwrap.dedent("""
        simple --install-option='--home=%s'
        INITools
        """ % home_simple))
    result = script.pip(
        'install',
        '--no-index', '-f', data.find_links,
        '-r', reqs_file,
        expect_error=True,
    )

    # 'simple' goes into the custom --home layout; INITools must end up in
    # the normal site-packages location, not under the custom home.
    simple = test_simple / 'lib' / 'python' / 'simple'
    bad = test_simple / 'lib' / 'python' / 'initools'
    good = script.site_packages / 'initools'
    assert simple in result.files_created
    assert result.files_created[simple].dir
    assert bad not in result.files_created
    assert good in result.files_created
    assert result.files_created[good].dir
|