import json
import os
import textwrap

import pytest

from tests.lib import (
    _create_test_package_with_subdirectory,
    create_basic_sdist_for_package,
    create_basic_wheel_for_package,
    need_svn,
    path_to_url,
    requirements_file,
)
from tests.lib.local_repos import local_checkout
from tests.lib.path import Path


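# Helper used by the arg_recording_sdist_maker fixture below: sdists built by
# that fixture ship a setup.py that dumps its sys.argv to a JSON file, so a
# test can check exactly which arguments pip passed to setup.py.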
class ArgRecordingSdist:
    def __init__(self, sdist_path, args_path):
        self.sdist_path = sdist_path
        self._args_path = args_path

    def args(self):
        return json.loads(self._args_path.read_text())


@pytest.fixture()
def arg_recording_sdist_maker(script):
    arg_writing_setup_py = textwrap.dedent(
        """
        import io
        import json
        import os
        import sys

        from setuptools import setup

        args_path = os.path.join(os.environ["OUTPUT_DIR"], "{name}.json")
        with open(args_path, 'w') as f:
            json.dump(sys.argv, f)

        setup(name={name!r}, version="0.1.0")
        """
    )
    output_dir = script.scratch_path.joinpath(
        "args_recording_sdist_maker_output"
    )
    output_dir.mkdir(parents=True)
    script.environ["OUTPUT_DIR"] = str(output_dir)

    def _arg_recording_sdist_maker(name: str) -> ArgRecordingSdist:
        extra_files = {"setup.py": arg_writing_setup_py.format(name=name)}
        sdist_path = create_basic_sdist_for_package(
            script, name, "0.1.0", extra_files
        )
        args_path = output_dir / f"{name}.json"
        return ArgRecordingSdist(sdist_path, args_path)

    return _arg_recording_sdist_maker


@pytest.mark.network
def test_requirements_file(script, with_wheel):
    """
    Test installing from a requirements file.

    """
    other_lib_name, other_lib_version = 'anyjson', '0.3'
    script.scratch_path.joinpath("initools-req.txt").write_text(textwrap.dedent(f"""\
        INITools==0.2
        # and something else to test out:
        {other_lib_name}<={other_lib_version}
        """))
    result = script.pip(
        'install', '-r', script.scratch_path / 'initools-req.txt'
    )
    result.did_create(
        script.site_packages / 'INITools-0.2.dist-info'
    )
    result.did_create(script.site_packages / 'initools')
    assert result.files_created[script.site_packages / other_lib_name].dir
    fn = '{}-{}.dist-info'.format(
        other_lib_name, other_lib_version)
    assert result.files_created[script.site_packages / fn].dir


def test_schema_check_in_requirements_file(script):
    """
    Test installing from a requirements file with an invalid vcs scheme.

    """
    script.scratch_path.joinpath("file-egg-req.txt").write_text(
        "\n{}\n".format(
            "git://github.com/alex/django-fixture-generator.git"
            "#egg=fixture_generator"
        )
    )

    with pytest.raises(AssertionError):
        script.pip(
            "install", "-vvv", "-r", script.scratch_path / "file-egg-req.txt"
        )


@pytest.mark.parametrize("test_type,editable", [
    ("rel_path", False),
    ("rel_path", True),
    ("rel_url", False),
    ("rel_url", True),
    ("embedded_rel_path", False),
    ("embedded_rel_path", True),
])
def test_relative_requirements_file(
    script, data, test_type, editable, with_wheel
):
    """
    Test installing from a requirements file with a relative path. For path
    URLs, use an egg= definition.

    """
    dist_info_folder = (
        script.site_packages /
        'FSPkg-0.1.dev0.dist-info'
    )
    egg_link_file = (
        script.site_packages / 'FSPkg.egg-link'
    )
    package_folder = script.site_packages / 'fspkg'

    # Compute relative install path to FSPkg from scratch path.
    full_rel_path = Path(
        os.path.relpath(data.packages.joinpath('FSPkg'), script.scratch_path)
    )
    full_rel_url = 'file:' + full_rel_path + '#egg=FSPkg'
    embedded_rel_path = script.scratch_path.joinpath(full_rel_path)

    req_path = {
        "rel_path": full_rel_path,
        "rel_url": full_rel_url,
        "embedded_rel_path": embedded_rel_path,
    }[test_type]

    req_path = req_path.replace(os.path.sep, '/')
    # Install as either editable or not.
    if not editable:
        with requirements_file(req_path + '\n',
                               script.scratch_path) as reqs_file:
            result = script.pip('install', '-vvv', '-r', reqs_file.name,
                                cwd=script.scratch_path)
        result.did_create(dist_info_folder)
        result.did_create(package_folder)
    else:
        with requirements_file('-e ' + req_path + '\n',
                               script.scratch_path) as reqs_file:
            result = script.pip('install', '-vvv', '-r', reqs_file.name,
                                cwd=script.scratch_path)
        result.did_create(egg_link_file)


@pytest.mark.xfail
@pytest.mark.network
@need_svn
def test_multiple_requirements_files(script, tmpdir, with_wheel):
    """
    Test installing from multiple nested requirements files.

    """
    other_lib_name, other_lib_version = 'anyjson', '0.3'
    script.scratch_path.joinpath("initools-req.txt").write_text(
        textwrap.dedent("""
            -e {}@10#egg=INITools
            -r {}-req.txt
        """).format(
            local_checkout('svn+http://svn.colorstudy.com/INITools', tmpdir),
            other_lib_name
        ),
    )
    script.scratch_path.joinpath(f"{other_lib_name}-req.txt").write_text(
        f"{other_lib_name}<={other_lib_version}"
    )
    result = script.pip(
        'install', '-r', script.scratch_path / 'initools-req.txt'
    )
    assert result.files_created[script.site_packages / other_lib_name].dir
    fn = f'{other_lib_name}-{other_lib_version}.dist-info'
    assert result.files_created[script.site_packages / fn].dir
    result.did_create(script.venv / 'src' / 'initools')


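# A package pinned in a constraints file should still be installed when it is
# pulled in as a dependency of something the user actually asked for.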
def test_package_in_constraints_and_dependencies(script, data):
    script.scratch_path.joinpath("constraints.txt").write_text(
        "TopoRequires2==0.0.1\nTopoRequires==0.0.1"
    )
    result = script.pip('install', '--no-index', '-f',
                        data.find_links, '-c', script.scratch_path /
                        'constraints.txt', 'TopoRequires2')
    assert 'installed TopoRequires-0.0.1' in result.stdout


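# Constraints files may nest other constraints files via "-c".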
def test_multiple_constraints_files(script, data):
    script.scratch_path.joinpath("outer.txt").write_text("-c inner.txt")
    script.scratch_path.joinpath("inner.txt").write_text(
        "Upper==1.0")
    result = script.pip(
        'install', '--no-index', '-f', data.find_links, '-c',
        script.scratch_path / 'outer.txt', 'Upper')
    assert 'installed Upper-1.0' in result.stdout


@pytest.mark.xfail(reason="Unclear what this guarantee is for.")
def test_respect_order_in_requirements_file(script, data):
    script.scratch_path.joinpath("frameworks-req.txt").write_text(textwrap.dedent("""\
        parent
        child
        simple
        """))

    result = script.pip(
        'install', '--no-index', '-f', data.find_links, '-r',
        script.scratch_path / 'frameworks-req.txt'
    )

    downloaded = [line for line in result.stdout.split('\n')
                  if 'Processing' in line]

    assert 'parent' in downloaded[0], (
        'First download should be "parent" but was "{}"'.format(downloaded[0])
    )
    assert 'child' in downloaded[1], (
        'Second download should be "child" but was "{}"'.format(downloaded[1])
    )
    assert 'simple' in downloaded[2], (
        'Third download should be "simple" but was "{}"'.format(downloaded[2])
    )


def test_install_local_editable_with_extras(script, data):
    to_install = data.packages.joinpath("LocalExtras")
    res = script.pip_install_local(
        '-e', to_install + '[bar]', allow_stderr_warning=True
    )
    res.did_update(script.site_packages / 'easy-install.pth')
    res.did_create(script.site_packages / 'LocalExtras.egg-link')
    res.did_create(script.site_packages / 'simple')


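# The "Installing collected packages" line should list the requested package
# last, i.e. after the dependencies that were collected for it.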
def test_install_collected_dependencies_first(script):
    result = script.pip_install_local(
        'toporequires2',
    )
    text = [line for line in result.stdout.split('\n')
            if 'Installing' in line][0]
    assert text.endswith('toporequires2')


@pytest.mark.network
def test_install_local_editable_with_subdirectory(script):
    version_pkg_path = _create_test_package_with_subdirectory(script,
                                                              'version_subdir')
    result = script.pip(
        'install', '-e',
        '{uri}#egg=version_subpkg&subdirectory=version_subdir'.format(
            uri='git+' + path_to_url(version_pkg_path),
        ),
    )

    result.assert_installed('version-subpkg', sub_dir='version_subdir')


@pytest.mark.network
def test_install_local_with_subdirectory(script):
    version_pkg_path = _create_test_package_with_subdirectory(script,
                                                              'version_subdir')
    result = script.pip(
        'install',
        '{uri}#egg=version_subpkg&subdirectory=version_subdir'.format(
            uri='git+' + path_to_url(version_pkg_path),
        ),
    )

    result.assert_installed('version_subpkg.py', editable=False)


@pytest.mark.incompatible_with_test_venv
def test_wheel_user_with_prefix_in_pydistutils_cfg(
        script, data, with_wheel):
    if os.name == 'posix':
        user_filename = ".pydistutils.cfg"
    else:
        user_filename = "pydistutils.cfg"
    user_cfg = os.path.join(os.path.expanduser('~'), user_filename)
    script.scratch_path.joinpath("bin").mkdir()
    with open(user_cfg, "w") as cfg:
        cfg.write(textwrap.dedent(f"""
            [install]
            prefix={script.scratch_path}"""))

    result = script.pip(
        'install', '--user', '--no-index',
        '-f', data.find_links,
        'requiresupper')
    # Check that we are really installing a wheel
    assert 'Running setup.py install for requiresupper' not in result.stdout
    assert 'installed requiresupper' in result.stdout


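# Both the CLI-level --install-option and the per-requirement one from the
# requirements file end up in setup.py's argv; the requirements-file option is
# appended after the CLI option, which is what lets it take precedence.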
def test_install_option_in_requirements_file_overrides_cli(
    script, arg_recording_sdist_maker
):
    simple_sdist = arg_recording_sdist_maker("simple")

    reqs_file = script.scratch_path.joinpath("reqs.txt")
    reqs_file.write_text("simple --install-option='-O0'")

    script.pip(
        'install', '--no-index', '-f', str(simple_sdist.sdist_path.parent),
        '-r', str(reqs_file), '--install-option=-O1',
    )
    simple_args = simple_sdist.args()
    assert 'install' in simple_args
    assert simple_args.index('-O1') < simple_args.index('-O0')


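# Packages named only in a constraints file should not be installed unless
# something actually requires them.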
def test_constraints_not_installed_by_default(script, data):
    script.scratch_path.joinpath("c.txt").write_text("requiresupper")
    result = script.pip(
        'install', '--no-index', '-f', data.find_links, '-c',
        script.scratch_path / 'c.txt', 'Upper')
    assert 'requiresupper' not in result.stdout


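# Giving pip only a constraints file, with nothing to install, is an error.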
def test_constraints_only_causes_error(script, data):
    script.scratch_path.joinpath("c.txt").write_text("requiresupper")
    result = script.pip(
        'install', '--no-index', '-f', data.find_links, '-c',
        script.scratch_path / 'c.txt', expect_error=True)
    assert 'installed requiresupper' not in result.stdout


def test_constraints_local_editable_install_causes_error(
    script,
    data,
    resolver_variant,
):
    script.scratch_path.joinpath("constraints.txt").write_text(
        "singlemodule==0.0.0"
    )
    to_install = data.src.joinpath("singlemodule")
    result = script.pip(
        'install', '--no-index', '-f', data.find_links, '-c',
        script.scratch_path / 'constraints.txt', '-e',
        to_install, expect_error=True)
    if resolver_variant == "legacy-resolver":
        assert 'Could not satisfy constraints' in result.stderr, str(result)
    else:
        # Because singlemodule only has 0.0.1 available.
        assert 'Cannot install singlemodule 0.0.1' in result.stderr, str(result)


@pytest.mark.network
def test_constraints_local_editable_install_pep518(script, data):
    to_install = data.src.joinpath("pep518-3.0")

    script.pip('download', 'setuptools', 'wheel', '-d', data.packages)
    script.pip(
        'install', '--no-index', '-f', data.find_links, '-e', to_install)


def test_constraints_local_install_causes_error(
    script,
    data,
    resolver_variant,
):
    script.scratch_path.joinpath("constraints.txt").write_text(
        "singlemodule==0.0.0"
    )
    to_install = data.src.joinpath("singlemodule")
    result = script.pip(
        'install', '--no-index', '-f', data.find_links, '-c',
        script.scratch_path / 'constraints.txt',
        to_install, expect_error=True)
    if resolver_variant == "legacy-resolver":
        assert 'Could not satisfy constraints' in result.stderr, str(result)
    else:
        # Because singlemodule only has 0.0.1 available.
        assert 'Cannot install singlemodule 0.0.1' in result.stderr, str(result)


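# An editable requirement in a constraints file is honoured by the legacy
# resolver but rejected by the 2020 resolver.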
def test_constraints_constrain_to_local_editable(
    script,
    data,
    resolver_variant,
):
    to_install = data.src.joinpath("singlemodule")
    script.scratch_path.joinpath("constraints.txt").write_text(
        "-e {url}#egg=singlemodule".format(url=path_to_url(to_install))
    )
    result = script.pip(
        'install', '--no-index', '-f', data.find_links, '-c',
        script.scratch_path / 'constraints.txt', 'singlemodule',
        allow_stderr_warning=True,
        expect_error=(resolver_variant == "2020-resolver"),
    )
    if resolver_variant == "2020-resolver":
        assert 'Editable requirements are not allowed as constraints' in result.stderr
    else:
        assert 'Running setup.py develop for singlemodule' in result.stdout


def test_constraints_constrain_to_local(script, data, resolver_variant):
    to_install = data.src.joinpath("singlemodule")
    script.scratch_path.joinpath("constraints.txt").write_text(
        "{url}#egg=singlemodule".format(url=path_to_url(to_install))
    )
    result = script.pip(
        'install', '--no-index', '-f', data.find_links, '-c',
        script.scratch_path / 'constraints.txt', 'singlemodule',
        allow_stderr_warning=True,
    )
    assert 'Running setup.py install for singlemodule' in result.stdout


def test_constrained_to_url_install_same_url(script, data):
    to_install = data.src.joinpath("singlemodule")
    constraints = path_to_url(to_install) + "#egg=singlemodule"
    script.scratch_path.joinpath("constraints.txt").write_text(constraints)
    result = script.pip(
        'install', '--no-index', '-f', data.find_links, '-c',
        script.scratch_path / 'constraints.txt', to_install,
        allow_stderr_warning=True,
    )
    assert 'Running setup.py install for singlemodule' in result.stdout, str(result)


def test_double_install_spurious_hash_mismatch(
        script, tmpdir, data, with_wheel):
    """Make sure installing the same hashed sdist twice doesn't throw hash
    mismatch errors.

    Really, this is a test that we disable reads from the wheel cache in
    hash-checking mode. Locally, implicitly built wheels of sdists obviously
    have different hashes from the original archives. Comparing against those
    causes spurious mismatch errors.

    """
    # Install wheel package, otherwise, it won't try to build wheels.
    with requirements_file('simple==1.0 --hash=sha256:393043e672415891885c9a2a'
                           '0929b1af95fb866d6ca016b42d2e6ce53619b653',
                           tmpdir) as reqs_file:
        # Install a package (and build its wheel):
        result = script.pip_install_local(
            '--find-links', data.find_links,
            '-r', reqs_file.resolve(),
        )
        assert 'Successfully installed simple-1.0' in str(result)

        # Uninstall it:
        script.pip('uninstall', '-y', 'simple')

        # Then install it again. We should not hit a hash mismatch, and the
        # package should install happily.
        result = script.pip_install_local(
            '--find-links', data.find_links,
            '-r', reqs_file.resolve(),
        )
        assert 'Successfully installed simple-1.0' in str(result)


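# With the 2020 resolver, constraints that carry extras are rejected outright;
# the legacy resolver applies the constraint and installs the extra's
# dependencies.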
def test_install_with_extras_from_constraints(script, data, resolver_variant):
    to_install = data.packages.joinpath("LocalExtras")
    script.scratch_path.joinpath("constraints.txt").write_text(
        "{url}#egg=LocalExtras[bar]".format(url=path_to_url(to_install))
    )
    result = script.pip_install_local(
        '-c', script.scratch_path / 'constraints.txt', 'LocalExtras',
        allow_stderr_warning=True,
        expect_error=(resolver_variant == "2020-resolver"),
    )
    if resolver_variant == "2020-resolver":
        assert 'Constraints cannot have extras' in result.stderr
    else:
        result.did_create(script.site_packages / 'simple')


def test_install_with_extras_from_install(script):
    create_basic_wheel_for_package(
        script,
        name="LocalExtras",
        version="0.0.1",
        extras={"bar": "simple", "baz": ["singlemodule"]},
    )
    script.scratch_path.joinpath("constraints.txt").write_text("LocalExtras")
    result = script.pip_install_local(
        '--find-links', script.scratch_path,
        '-c', script.scratch_path / 'constraints.txt',
        'LocalExtras[baz]',
    )
    result.did_create(script.site_packages / 'singlemodule.py')


def test_install_with_extras_joined(script, data, resolver_variant):
    to_install = data.packages.joinpath("LocalExtras")
    script.scratch_path.joinpath("constraints.txt").write_text(
        "{url}#egg=LocalExtras[bar]".format(url=path_to_url(to_install))
    )
    result = script.pip_install_local(
        '-c', script.scratch_path / 'constraints.txt', 'LocalExtras[baz]',
        allow_stderr_warning=True,
        expect_error=(resolver_variant == "2020-resolver"),
    )
    if resolver_variant == "2020-resolver":
        assert 'Constraints cannot have extras' in result.stderr
    else:
        result.did_create(script.site_packages / 'simple')
        result.did_create(script.site_packages / 'singlemodule.py')


def test_install_with_extras_editable_joined(script, data, resolver_variant):
    to_install = data.packages.joinpath("LocalExtras")
    script.scratch_path.joinpath("constraints.txt").write_text(
        "-e {url}#egg=LocalExtras[bar]".format(url=path_to_url(to_install))
    )
    result = script.pip_install_local(
        '-c', script.scratch_path / 'constraints.txt', 'LocalExtras[baz]',
        allow_stderr_warning=True,
        expect_error=(resolver_variant == "2020-resolver"),
    )
    if resolver_variant == "2020-resolver":
        assert 'Editable requirements are not allowed as constraints' in result.stderr
    else:
        result.did_create(script.site_packages / 'simple')
        result.did_create(script.site_packages / 'singlemodule.py')


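# Requesting the same distribution several times with different extras should
# install the union of all the requested extras' dependencies.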
def test_install_distribution_full_union(script, data):
    to_install = data.packages.joinpath("LocalExtras")
    result = script.pip_install_local(
        to_install, to_install + "[bar]", to_install + "[baz]")
    assert 'Running setup.py install for LocalExtras' in result.stdout
    result.did_create(script.site_packages / 'simple')
    result.did_create(script.site_packages / 'singlemodule.py')


def test_install_distribution_duplicate_extras(script, data):
    to_install = data.packages.joinpath("LocalExtras")
    package_name = to_install + "[bar]"
    with pytest.raises(AssertionError):
        result = script.pip_install_local(package_name, package_name)
        expected = (f'Double requirement given: {package_name}')
        assert expected in result.stderr


def test_install_distribution_union_with_constraints(
    script,
    data,
    resolver_variant,
):
    to_install = data.packages.joinpath("LocalExtras")
    script.scratch_path.joinpath("constraints.txt").write_text(
        f"{to_install}[bar]")
    result = script.pip_install_local(
        '-c', script.scratch_path / 'constraints.txt', to_install + '[baz]',
        allow_stderr_warning=True,
        expect_error=(resolver_variant == "2020-resolver"),
    )
    if resolver_variant == "2020-resolver":
        msg = 'Unnamed requirements are not allowed as constraints'
        assert msg in result.stderr
    else:
        assert 'Running setup.py install for LocalExtras' in result.stdout
        result.did_create(script.site_packages / 'singlemodule.py')


def test_install_distribution_union_with_versions(
    script,
    data,
    resolver_variant,
):
    to_install_001 = data.packages.joinpath("LocalExtras")
    to_install_002 = data.packages.joinpath("LocalExtras-0.0.2")
    result = script.pip_install_local(
        to_install_001 + "[bar]",
        to_install_002 + "[baz]",
        expect_error=(resolver_variant == "2020-resolver"),
    )
    if resolver_variant == "2020-resolver":
        assert "Cannot install localextras[bar]" in result.stderr
        assert (
            "localextras[bar] 0.0.1 depends on localextras 0.0.1"
        ) in result.stdout
        assert (
            "localextras[baz] 0.0.2 depends on localextras 0.0.2"
        ) in result.stdout
    else:
        assert (
            "Successfully installed LocalExtras-0.0.1 simple-3.0 "
            "singlemodule-0.0.1"
        ) in result.stdout


@pytest.mark.xfail
def test_install_distribution_union_conflicting_extras(script, data):
    # LocalExtras requires simple==1.0, LocalExtras[bar] requires simple==2.0;
    # without a resolver, pip does not detect the conflict between simple==1.0
    # and simple==2.0. Once a resolver is added, this conflict should be
    # detected.
    to_install = data.packages.joinpath("LocalExtras-0.0.2")
    result = script.pip_install_local(to_install, to_install + "[bar]",
                                      expect_error=True)
    assert 'installed' not in result.stdout
    assert "Conflict" in result.stderr


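# A requirement whose environment marker does not match the running
# interpreter is skipped entirely, so the unsupported wheel is never fetched.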
def test_install_unsupported_wheel_link_with_marker(script):
    script.scratch_path.joinpath("with-marker.txt").write_text(
        textwrap.dedent("""\
            {url}; {req}
        """).format(
            url='https://github.com/a/b/c/asdf-1.5.2-cp27-none-xyz.whl',
            req='sys_platform == "xyz"',
        )
    )
    result = script.pip(
        'install', '-r', script.scratch_path / 'with-marker.txt'
    )

    assert ("Ignoring asdf: markers 'sys_platform == \"xyz\"' don't match "
            "your environment") in result.stdout
    assert len(result.files_created) == 0


def test_install_unsupported_wheel_file(script, data):
    # Trying to install a local wheel with an incompatible version/type
    # should fail.
    path = data.packages.joinpath("simple.dist-0.1-py1-none-invalid.whl")
    script.scratch_path.joinpath("wheel-file.txt").write_text(path + '\n')
    result = script.pip(
        'install', '-r', script.scratch_path / 'wheel-file.txt',
        expect_error=True,
        expect_stderr=True,
    )
    assert ("simple.dist-0.1-py1-none-invalid.whl is not a supported " +
            "wheel on this platform" in result.stderr)
    assert len(result.files_created) == 0


def test_install_options_local_to_package(script, arg_recording_sdist_maker):
    """Make sure --install-options does not leak across packages.

    A requirements.txt file can have per-package --install-options; these
    should be isolated to just the package instead of leaking to subsequent
    packages. This needs to be a functional test because the bug was around
    cross-contamination at install time.
    """

    simple1_sdist = arg_recording_sdist_maker("simple1")
    simple2_sdist = arg_recording_sdist_maker("simple2")

    reqs_file = script.scratch_path.joinpath("reqs.txt")
    reqs_file.write_text(
        textwrap.dedent(
            """
            simple1 --install-option='-O0'
            simple2
            """
        )
    )
    script.pip(
        'install',
        '--no-index', '-f', str(simple1_sdist.sdist_path.parent),
        '-r', reqs_file,
    )

    simple1_args = simple1_sdist.args()
    assert 'install' in simple1_args
    assert '-O0' in simple1_args
    simple2_args = simple2_sdist.args()
    assert 'install' in simple2_args
    assert '-O0' not in simple2_args


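# Location-changing options such as --home passed via a per-requirement
# --install-option should cause the install to fail.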
def test_location_related_install_option_fails(script):
    simple_sdist = create_basic_sdist_for_package(script, "simple", "0.1.0")
    reqs_file = script.scratch_path.joinpath("reqs.txt")
    reqs_file.write_text("simple --install-option='--home=/tmp'")
    result = script.pip(
        'install',
        '--no-index', '-f', str(simple_sdist.parent),
        '-r', reqs_file,
        expect_error=True
    )
    assert "['--home'] from simple" in result.stderr