2020-05-12 15:18:27 +02:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
|
2013-05-28 23:58:08 +02:00
|
|
|
"""Tests for wheel binary packages and .dist-info."""
|
2019-01-24 03:44:54 +01:00
|
|
|
import csv
|
2018-06-23 23:07:39 +02:00
|
|
|
import logging
|
2018-06-23 23:10:40 +02:00
|
|
|
import os
|
2019-01-24 03:44:54 +01:00
|
|
|
import textwrap
|
2019-12-31 18:32:11 +01:00
|
|
|
from email import message_from_string
|
2014-04-25 23:42:14 +02:00
|
|
|
|
2017-05-16 12:16:30 +02:00
|
|
|
import pytest
|
2019-12-31 20:00:16 +01:00
|
|
|
from mock import patch
|
2017-06-13 14:17:00 +02:00
|
|
|
from pip._vendor.packaging.requirements import Requirement
|
|
|
|
|
2020-07-10 03:13:44 +02:00
|
|
|
from pip._internal.exceptions import InstallationError
|
2019-11-07 03:10:11 +01:00
|
|
|
from pip._internal.locations import get_scheme
|
2020-02-01 13:40:20 +01:00
|
|
|
from pip._internal.models.direct_url import (
|
|
|
|
DIRECT_URL_METADATA_NAME,
|
|
|
|
ArchiveInfo,
|
|
|
|
DirectUrl,
|
|
|
|
)
|
2019-11-07 03:10:11 +01:00
|
|
|
from pip._internal.models.scheme import Scheme
|
2020-09-23 16:27:09 +02:00
|
|
|
from pip._internal.operations.build.wheel_legacy import get_legacy_build_wheel_path
|
2019-12-03 23:02:39 +01:00
|
|
|
from pip._internal.operations.install import wheel
|
|
|
|
from pip._internal.utils.compat import WINDOWS
|
|
|
|
from pip._internal.utils.misc import hash_file
|
|
|
|
from pip._internal.utils.unpacking import unpack_file
|
2020-07-03 16:35:00 +02:00
|
|
|
from pip._internal.utils.wheel import pkg_resources_distribution_for_wheel
|
2020-05-28 10:08:17 +02:00
|
|
|
from tests.lib import DATA_DIR, assert_paths_equal, skip_if_python2
|
2020-07-03 16:35:00 +02:00
|
|
|
from tests.lib.wheel import make_wheel
|
2020-01-01 19:21:47 +01:00
|
|
|
|
2018-10-12 21:16:38 +02:00
|
|
|
|
2019-02-10 21:36:59 +01:00
|
|
|
def call_get_legacy_build_wheel_path(caplog, names):
    """Invoke get_legacy_build_wheel_path() with canned fixture arguments.

    The ``caplog`` parameter is accepted for interface parity with the
    tests that capture log records around this call.
    """
    return get_legacy_build_wheel_path(
        names=names,
        temp_dir='/tmp/abcd',
        name='pendulum',
        command_args=['arg1', 'arg2'],
        command_output='output line 1\noutput line 2\n',
    )
|
|
|
|
|
|
|
|
|
|
|
|
def test_get_legacy_build_wheel_path(caplog):
    """A single candidate name resolves to its path with no log records."""
    result = call_get_legacy_build_wheel_path(caplog, names=['name'])
    assert_paths_equal(result, '/tmp/abcd/name')
    assert not caplog.records
|
|
|
|
|
|
|
|
|
|
|
|
def test_get_legacy_build_wheel_path__no_names(caplog):
    """With no candidate names, None is returned and a warning is logged."""
    caplog.set_level(logging.INFO)
    result = call_get_legacy_build_wheel_path(caplog, names=[])
    assert result is None
    assert len(caplog.records) == 1
    warning = caplog.records[0]
    assert warning.levelname == 'WARNING'
    expected_lines = [
        "Legacy build of wheel for 'pendulum' created no files.",
        "Command arguments: arg1 arg2",
        'Command output: [use --verbose to show]',
    ]
    assert warning.message.splitlines() == expected_lines
|
|
|
|
|
|
|
|
|
|
|
|
def test_get_legacy_build_wheel_path__multiple_names(caplog):
    """With several candidates, the first sorted name wins and a warning
    listing every filename is logged."""
    caplog.set_level(logging.INFO)
    # Deliberately pass the names in non-sorted order.
    result = call_get_legacy_build_wheel_path(
        caplog, names=['name2', 'name1'],
    )
    assert_paths_equal(result, '/tmp/abcd/name1')
    assert len(caplog.records) == 1
    warning = caplog.records[0]
    assert warning.levelname == 'WARNING'
    expected_lines = [
        "Legacy build of wheel for 'pendulum' created more than one file.",
        "Filenames (choosing first): ['name1', 'name2']",
        "Command arguments: arg1 arg2",
        'Command output: [use --verbose to show]',
    ]
    assert warning.message.splitlines() == expected_lines
|
|
|
|
|
|
|
|
|
2020-05-28 10:08:17 +02:00
|
|
|
@pytest.mark.parametrize(
    "console_scripts",
    [
        u"pip = pip._internal.main:pip",
        u"pip:pip = pip._internal.main:pip",
        pytest.param(u"進入點 = 套件.模組:函式", marks=skip_if_python2),
    ],
)
def test_get_entrypoints(console_scripts):
    """get_entrypoints() reads [console_scripts] and ignores other sections."""
    entry_points_text = u"""
    [console_scripts]
    {}
    [section]
    common:one = module:func
    common:two = module:other_func
    """.format(console_scripts)

    wheel_zip = make_wheel(
        "simple",
        "0.1.0",
        extra_metadata_files={"entry_points.txt": entry_points_text},
    ).as_zipfile()
    distribution = pkg_resources_distribution_for_wheel(
        wheel_zip, "simple", "<in memory>"
    )

    # The parametrized entry is "name = target"; only it should appear
    # in the console-scripts mapping, and the GUI mapping stays empty.
    name, target = console_scripts.split(' = ')
    assert wheel.get_entrypoints(distribution) == ({name: target}, {})
|
|
|
|
|
|
|
|
|
2020-07-03 16:49:44 +02:00
|
|
|
def test_get_entrypoints_no_entrypoints():
    """A wheel without entry_points.txt yields two empty mappings."""
    wheel_zip = make_wheel("simple", "0.1.0").as_zipfile()
    distribution = pkg_resources_distribution_for_wheel(
        wheel_zip, "simple", "<in memory>"
    )

    assert wheel.get_entrypoints(distribution) == ({}, {})
|
|
|
|
|
|
|
|
|
2018-10-24 18:19:58 +02:00
|
|
|
@pytest.mark.parametrize("outrows, expected", [
    ([
        (u'', '', 'a'),
        (u'', '', ''),
    ], [
        ('', '', ''),
        ('', '', 'a'),
    ]),
    ([
        # Include an int to check avoiding the following error:
        # > TypeError: '<' not supported between instances of 'str' and 'int'
        (u'', '', 1),
        (u'', '', ''),
    ], [
        ('', '', ''),
        ('', '', '1'),
    ]),
    ([
        # Test that normalization encodes everything for csv.writer().
        (u'😉', '', 1),
        (u'', '', ''),
    ], [
        ('', '', ''),
        ('😉', '', '1'),
    ]),
])
def test_normalized_outrows(outrows, expected):
    """_normalized_outrows() sorts rows and stringifies every element."""
    assert wheel._normalized_outrows(outrows) == expected
|
|
|
|
|
|
|
|
|
2019-01-24 03:44:54 +01:00
|
|
|
def call_get_csv_rows_for_installed(tmpdir, text):
    """Write *text* as a RECORD file and run get_csv_rows_for_installed.

    Maps installed file 'a' to 'z' so callers can verify that a RECORD
    entry for an installed file has its filename rewritten.
    """
    record_path = tmpdir.joinpath('temp.txt')
    record_path.write_text(text)

    with open(record_path, **wheel.csv_io_kwargs('r')) as f:
        record_rows = list(csv.reader(f))
    return wheel.get_csv_rows_for_installed(
        record_rows,
        installed={u'a': 'z'},
        changed=set(),
        generated=[],
        lib_dir='/lib/dir',
    )
|
|
|
|
|
|
|
|
|
2019-01-24 04:16:10 +01:00
|
|
|
def test_get_csv_rows_for_installed(tmpdir, caplog):
    """An installed file's RECORD row gets its filename rewritten."""
    text = textwrap.dedent("""\
    a,b,c
    d,e,f
    """)
    outrows = call_get_csv_rows_for_installed(tmpdir, text)

    assert outrows == [
        ('z', 'b', 'c'),
        ('d', 'e', 'f'),
    ]
    # Well-formed rows should not produce any warnings.
    assert not caplog.records
|
2019-01-24 03:44:54 +01:00
|
|
|
|
|
|
|
|
2019-01-24 04:16:10 +01:00
|
|
|
def test_get_csv_rows_for_installed__long_lines(tmpdir, caplog):
    """RECORD rows with extra elements are truncated and warned about."""
    text = textwrap.dedent("""\
    a,b,c,d
    e,f,g
    h,i,j,k
    """)
    outrows = call_get_csv_rows_for_installed(tmpdir, text)

    expected_rows = [
        ('z', 'b', 'c'),
        ('e', 'f', 'g'),
        ('h', 'i', 'j'),
    ]
    assert outrows == expected_rows

    expected_messages = [
        "RECORD line has more than three elements: ['a', 'b', 'c', 'd']",
        "RECORD line has more than three elements: ['h', 'i', 'j', 'k']"
    ]
    assert [rec.message for rec in caplog.records] == expected_messages
|
|
|
|
|
2019-01-24 03:44:54 +01:00
|
|
|
|
2019-12-31 18:45:13 +01:00
|
|
|
@pytest.mark.parametrize("text,expected", [
    ("Root-Is-Purelib: true", True),
    ("Root-Is-Purelib: false", False),
    ("Root-Is-Purelib: hello", False),
    ("", False),
    ("root-is-purelib: true", True),
    ("root-is-purelib: True", True),
])
def test_wheel_root_is_purelib(text, expected):
    """Root-Is-Purelib is matched case-insensitively; only 'true' counts."""
    message = message_from_string(text)
    assert wheel.wheel_root_is_purelib(message) == expected
|
|
|
|
|
|
|
|
|
2013-05-28 23:58:08 +02:00
|
|
|
class TestWheelFile(object):

    def test_unpack_wheel_no_flatten(self, tmpdir):
        """Unpacking a wheel preserves its dist-info directory structure."""
        wheel_file = os.path.join(
            DATA_DIR, 'packages', 'meta-1.0-py2.py3-none-any.whl')
        unpack_file(wheel_file, tmpdir)
        dist_info_dir = os.path.join(tmpdir, 'meta-1.0.dist-info')
        assert os.path.isdir(dist_info_dir)
|
2013-07-28 05:48:15 +02:00
|
|
|
|
2013-08-22 08:30:15 +02:00
|
|
|
|
2019-10-12 03:31:35 +02:00
|
|
|
class TestInstallUnpackedWheel(object):
    """
    Tests for moving files from wheel src to scheme paths
    """

    def prep(self, data, tmpdir):
        """Build a sample wheel and a Scheme under *tmpdir* for the tests.

        The ``data`` fixture is unused here but kept so tests can pass it
        through with a uniform signature.
        """
        # Since Path implements __add__, os.path.join returns a Path object.
        # Passing Path objects to interfaces expecting str (like
        # `compileall.compile_file`) can cause failures, so we normalize it
        # to a string here.
        tmpdir = str(tmpdir)
        self.name = 'sample'
        # A wheel with metadata, package code, package data, extra
        # .data files and console/gui entry points — one of everything
        # install_wheel has to handle.
        self.wheelpath = make_wheel(
            "sample",
            "1.2.0",
            metadata_body=textwrap.dedent(
                """
                A sample Python project
                =======================

                ...
                """
            ),
            metadata_updates={
                "Requires-Dist": ["peppercorn"],
            },
            extra_files={
                "sample/__init__.py": textwrap.dedent(
                    '''
                    __version__ = '1.2.0'


                    def main():
                        """Entry point for the application script"""
                        print("Call your main application code here")
                    '''
                ),
                "sample/package_data.dat": "some data",
            },
            extra_metadata_files={
                "DESCRIPTION.rst": textwrap.dedent(
                    """
                    A sample Python project
                    =======================

                    ...
                    """
                ),
                "top_level.txt": "sample\n",
                # Deliberately include an empty directory to check it is
                # not installed (see test_dist_info_contains_empty_dir).
                "empty_dir/empty_dir/": "",
            },
            extra_data_files={
                "data/my_data/data_file": "some data",
            },
            entry_points={
                "console_scripts": ["sample = sample:main"],
                "gui_scripts": ["sample2 = sample:main"],
            },
        ).save_to_dir(tmpdir)
        self.req = Requirement('sample')
        self.src = os.path.join(tmpdir, 'src')
        self.dest = os.path.join(tmpdir, 'dest')
        self.scheme = Scheme(
            purelib=os.path.join(self.dest, 'lib'),
            platlib=os.path.join(self.dest, 'lib'),
            headers=os.path.join(self.dest, 'headers'),
            scripts=os.path.join(self.dest, 'bin'),
            data=os.path.join(self.dest, 'data'),
        )
        self.src_dist_info = os.path.join(
            self.src, 'sample-1.2.0.dist-info')
        self.dest_dist_info = os.path.join(
            self.scheme.purelib, 'sample-1.2.0.dist-info')

    def assert_permission(self, path, mode):
        """Assert that *path* has at least the permission bits in *mode*."""
        target_mode = os.stat(path).st_mode & 0o777
        # Check mode is a subset of target_mode rather than exact equality,
        # so extra bits (e.g. group/world read on permissive systems) pass.
        assert (target_mode & mode) == mode, oct(target_mode)

    def assert_installed(self, expected_permission):
        """Assert that the sample wheel was fully installed into the scheme."""
        # lib
        assert os.path.isdir(
            os.path.join(self.scheme.purelib, 'sample'))
        # dist-info
        metadata = os.path.join(self.dest_dist_info, 'METADATA')
        self.assert_permission(metadata, expected_permission)
        record = os.path.join(self.dest_dist_info, 'RECORD')
        self.assert_permission(record, expected_permission)
        # data files
        data_file = os.path.join(self.scheme.data, 'my_data', 'data_file')
        assert os.path.isfile(data_file)
        # package data
        pkg_data = os.path.join(
            self.scheme.purelib, 'sample', 'package_data.dat')
        assert os.path.isfile(pkg_data)

    def test_std_install(self, data, tmpdir):
        """A plain install places all files with default 0o644 permission."""
        self.prep(data, tmpdir)
        wheel.install_wheel(
            self.name,
            self.wheelpath,
            scheme=self.scheme,
            req_description=str(self.req),
        )
        self.assert_installed(0o644)

    @pytest.mark.parametrize("user_mask, expected_permission", [
        (0o27, 0o640)
    ])
    def test_std_install_with_custom_umask(self, data, tmpdir,
                                           user_mask, expected_permission):
        """Test that the files created after install honor the permissions
        set when the user sets a custom umask"""

        # Set the custom umask for the duration of the install only,
        # restoring the original in a finally block.
        prev_umask = os.umask(user_mask)
        try:
            self.prep(data, tmpdir)
            wheel.install_wheel(
                self.name,
                self.wheelpath,
                scheme=self.scheme,
                req_description=str(self.req),
            )
            self.assert_installed(expected_permission)
        finally:
            os.umask(prev_umask)

    def test_std_install_requested(self, data, tmpdir):
        """requested=True creates a REQUESTED marker in the dist-info dir."""
        self.prep(data, tmpdir)
        wheel.install_wheel(
            self.name,
            self.wheelpath,
            scheme=self.scheme,
            req_description=str(self.req),
            requested=True,
        )
        self.assert_installed(0o644)
        requested_path = os.path.join(self.dest_dist_info, 'REQUESTED')
        assert os.path.isfile(requested_path)

    def test_std_install_with_direct_url(self, data, tmpdir):
        """Test that install_wheel creates direct_url.json metadata when
        provided with a direct_url argument. Also test that the RECORDS
        file contains an entry for direct_url.json in that case.
        Note direct_url.url is intentionally different from wheelpath,
        because wheelpath is typically the result of a local build.
        """
        self.prep(data, tmpdir)
        direct_url = DirectUrl(
            url="file:///home/user/archive.tgz",
            info=ArchiveInfo(),
        )
        wheel.install_wheel(
            self.name,
            self.wheelpath,
            scheme=self.scheme,
            req_description=str(self.req),
            direct_url=direct_url,
        )
        direct_url_path = os.path.join(
            self.dest_dist_info, DIRECT_URL_METADATA_NAME
        )
        self.assert_permission(direct_url_path, 0o644)
        # The written file must round-trip to the same JSON the
        # DirectUrl object serializes to.
        with open(direct_url_path, 'rb') as f:
            expected_direct_url_json = direct_url.to_json()
            direct_url_json = f.read().decode("utf-8")
            assert direct_url_json == expected_direct_url_json
        # check that the direct_url file is part of RECORDS
        with open(os.path.join(self.dest_dist_info, "RECORD")) as f:
            assert DIRECT_URL_METADATA_NAME in f.read()

    def test_install_prefix(self, data, tmpdir):
        """Installing with a prefix places scripts and data under it."""
        prefix = os.path.join(os.path.sep, 'some', 'path')
        self.prep(data, tmpdir)
        scheme = get_scheme(
            self.name,
            user=False,
            home=None,
            root=tmpdir,
            isolated=False,
            prefix=prefix,
        )
        wheel.install_wheel(
            self.name,
            self.wheelpath,
            scheme=scheme,
            req_description=str(self.req),
        )

        bin_dir = 'Scripts' if WINDOWS else 'bin'
        assert os.path.exists(os.path.join(tmpdir, 'some', 'path', bin_dir))
        assert os.path.exists(os.path.join(tmpdir, 'some', 'path', 'my_data'))

    def test_dist_info_contains_empty_dir(self, data, tmpdir):
        """
        Test that empty dirs are not installed
        """
        # e.g. https://github.com/pypa/pip/issues/1632#issuecomment-38027275
        self.prep(data, tmpdir)
        wheel.install_wheel(
            self.name,
            self.wheelpath,
            scheme=self.scheme,
            req_description=str(self.req),
        )
        self.assert_installed(0o644)
        assert not os.path.isdir(
            os.path.join(self.dest_dist_info, 'empty_dir'))

    @pytest.mark.parametrize(
        "path",
        ["/tmp/example", "../example", "./../example"]
    )
    def test_wheel_install_rejects_bad_paths(self, data, tmpdir, path):
        """Absolute or parent-escaping member paths abort the install."""
        self.prep(data, tmpdir)
        wheel_path = make_wheel(
            "simple", "0.1.0", extra_files={path: "example contents\n"}
        ).save_to_dir(tmpdir)
        with pytest.raises(InstallationError) as e:
            wheel.install_wheel(
                "simple",
                str(wheel_path),
                scheme=self.scheme,
                req_description="simple",
            )

        # The error should name both the wheel and the offending member.
        exc_text = str(e.value)
        assert os.path.basename(wheel_path) in exc_text
        assert "example" in exc_text

    @pytest.mark.xfail(strict=True)
    @pytest.mark.parametrize(
        "entrypoint", ["hello = hello", "hello = hello:"]
    )
    @pytest.mark.parametrize(
        "entrypoint_type", ["console_scripts", "gui_scripts"]
    )
    def test_invalid_entrypoints_fail(
        self, data, tmpdir, entrypoint, entrypoint_type
    ):
        """Malformed entry points should fail the install (known-broken:
        marked xfail until install_wheel rejects them)."""
        self.prep(data, tmpdir)
        wheel_path = make_wheel(
            "simple", "0.1.0", entry_points={entrypoint_type: [entrypoint]}
        ).save_to_dir(tmpdir)
        with pytest.raises(InstallationError) as e:
            wheel.install_wheel(
                "simple",
                str(wheel_path),
                scheme=self.scheme,
                req_description="simple",
            )

        exc_text = str(e.value)
        assert os.path.basename(wheel_path) in exc_text
        assert entrypoint in exc_text
|
|
|
|
|
2014-05-03 19:02:23 +02:00
|
|
|
|
2017-10-02 18:54:37 +02:00
|
|
|
class TestMessageAboutScriptsNotOnPATH(object):
    """Tests for wheel.message_about_scripts_not_on_PATH()."""

    # Expected warning fragment for PATH entries starting with `~`.
    tilde_warning_msg = (
        "NOTE: The current PATH contains path(s) starting with `~`, "
        "which may not be expanded by all applications."
    )

    def _template(self, paths, scripts):
        """Run the check with PATH set to *paths* for the duration."""
        with patch.dict('os.environ', {'PATH': os.pathsep.join(paths)}):
            return wheel.message_about_scripts_not_on_PATH(scripts)

    def test_no_script(self):
        """No scripts installed -> no message."""
        retval = self._template(
            paths=['/a/b', '/c/d/bin'],
            scripts=[]
        )
        assert retval is None

    def test_single_script__single_dir_not_on_PATH(self):
        """One script in a dir off PATH -> message names script and dir."""
        retval = self._template(
            paths=['/a/b', '/c/d/bin'],
            scripts=['/c/d/foo']
        )
        assert retval is not None
        assert "--no-warn-script-location" in retval
        assert "foo is installed in '/c/d'" in retval
        assert self.tilde_warning_msg not in retval

    def test_two_script__single_dir_not_on_PATH(self):
        """Two scripts in one off-PATH dir -> both listed, sorted."""
        retval = self._template(
            paths=['/a/b', '/c/d/bin'],
            scripts=['/c/d/foo', '/c/d/baz']
        )
        assert retval is not None
        assert "--no-warn-script-location" in retval
        assert "baz and foo are installed in '/c/d'" in retval
        assert self.tilde_warning_msg not in retval

    def test_multi_script__multi_dir_not_on_PATH(self):
        """Scripts across several off-PATH dirs -> one line per dir."""
        retval = self._template(
            paths=['/a/b', '/c/d/bin'],
            scripts=['/c/d/foo', '/c/d/bar', '/c/d/baz', '/a/b/c/spam']
        )
        assert retval is not None
        assert "--no-warn-script-location" in retval
        assert "bar, baz and foo are installed in '/c/d'" in retval
        assert "spam is installed in '/a/b/c'" in retval
        assert self.tilde_warning_msg not in retval

    def test_multi_script_all__multi_dir_not_on_PATH(self):
        """Multiple scripts in each of several off-PATH dirs."""
        retval = self._template(
            paths=['/a/b', '/c/d/bin'],
            scripts=[
                '/c/d/foo', '/c/d/bar', '/c/d/baz',
                '/a/b/c/spam', '/a/b/c/eggs'
            ]
        )
        assert retval is not None
        assert "--no-warn-script-location" in retval
        assert "bar, baz and foo are installed in '/c/d'" in retval
        assert "eggs and spam are installed in '/a/b/c'" in retval
        assert self.tilde_warning_msg not in retval

    def test_two_script__single_dir_on_PATH(self):
        """Scripts in a dir that is on PATH -> no message."""
        retval = self._template(
            paths=['/a/b', '/c/d/bin'],
            scripts=['/a/b/foo', '/a/b/baz']
        )
        assert retval is None

    def test_multi_script__multi_dir_on_PATH(self):
        """All script dirs on PATH -> no message."""
        retval = self._template(
            paths=['/a/b', '/c/d/bin'],
            scripts=['/a/b/foo', '/a/b/bar', '/a/b/baz', '/c/d/bin/spam']
        )
        assert retval is None

    def test_multi_script__single_dir_on_PATH(self):
        """Several scripts in one on-PATH dir -> no message."""
        retval = self._template(
            paths=['/a/b', '/c/d/bin'],
            scripts=['/a/b/foo', '/a/b/bar', '/a/b/baz']
        )
        assert retval is None

    def test_single_script__single_dir_on_PATH(self):
        """One script in one on-PATH dir -> no message."""
        retval = self._template(
            paths=['/a/b', '/c/d/bin'],
            scripts=['/a/b/foo']
        )
        assert retval is None

    def test_PATH_check_case_insensitive_on_windows(self):
        """PATH comparison ignores case (and slash direction) on Windows."""
        retval = self._template(
            paths=['C:\\A\\b'],
            scripts=['c:\\a\\b\\c', 'C:/A/b/d']
        )
        if WINDOWS:
            assert retval is None
        else:
            assert retval is not None
            assert self.tilde_warning_msg not in retval

    def test_trailing_ossep_removal(self):
        """A trailing os.sep on a PATH entry does not defeat matching."""
        retval = self._template(
            paths=[os.path.join('a', 'b', '')],
            scripts=[os.path.join('a', 'b', 'c')]
        )
        assert retval is None

    def test_missing_PATH_env_treated_as_empty_PATH_env(self, monkeypatch):
        """An unset PATH behaves exactly like an empty PATH."""
        scripts = ['a/b/foo']

        monkeypatch.delenv('PATH')
        retval_missing = wheel.message_about_scripts_not_on_PATH(scripts)

        monkeypatch.setenv('PATH', '')
        retval_empty = wheel.message_about_scripts_not_on_PATH(scripts)

        assert retval_missing == retval_empty

    def test_no_script_tilde_in_path(self):
        """Tilde entries on PATH alone (no scripts) produce no message."""
        retval = self._template(
            paths=['/a/b', '/c/d/bin', '~/e', '/f/g~g'],
            scripts=[]
        )
        assert retval is None

    def test_multi_script_all_tilde__multi_dir_not_on_PATH(self):
        """A leading-tilde PATH entry triggers the tilde warning note."""
        retval = self._template(
            paths=['/a/b', '/c/d/bin', '~e/f'],
            scripts=[
                '/c/d/foo', '/c/d/bar', '/c/d/baz',
                '/a/b/c/spam', '/a/b/c/eggs', '/e/f/tilde'
            ]
        )
        assert retval is not None
        assert "--no-warn-script-location" in retval
        assert "bar, baz and foo are installed in '/c/d'" in retval
        assert "eggs and spam are installed in '/a/b/c'" in retval
        assert "tilde is installed in '/e/f'" in retval
        assert self.tilde_warning_msg in retval

    def test_multi_script_all_tilde_not_at_start__multi_dir_not_on_PATH(self):
        """A tilde NOT at the start of a PATH entry does not warn."""
        retval = self._template(
            paths=['/e/f~f', '/c/d/bin'],
            scripts=[
                '/c/d/foo', '/c/d/bar', '/c/d/baz',
                '/e/f~f/c/spam', '/e/f~f/c/eggs'
            ]
        )
        assert retval is not None
        assert "--no-warn-script-location" in retval
        assert "bar, baz and foo are installed in '/c/d'" in retval
        assert "eggs and spam are installed in '/e/f~f/c'" in retval
        assert self.tilde_warning_msg not in retval
|
|
|
|
|
2019-06-26 11:44:43 +02:00
|
|
|
|
|
|
|
class TestWheelHashCalculators(object):
    """Tests for hash_file() and wheel.rehash() against known digests."""

    def prep(self, tmpdir):
        """Create a sparse 2 MiB all-zero file with its known sha256 values."""
        self.test_file = tmpdir.joinpath("hash.file")
        # Want this big enough to trigger the internal read loops.
        self.test_file_len = 2 * 1024 * 1024
        # truncate() extends the file with zero bytes, so the digest of
        # this content is fixed and can be hard-coded below.
        with open(str(self.test_file), "w") as fp:
            fp.truncate(self.test_file_len)
        # Hex digest, as returned by hashlib's hexdigest().
        self.test_file_hash = \
            '5647f05ec18958947d32874eeb788fa396a05d0bab7c1b71f112ceb7e9b31eee'
        # RECORD-style encoding: "sha256=" + urlsafe-base64, no padding.
        self.test_file_hash_encoded = \
            'sha256=VkfwXsGJWJR9ModO63iPo5agXQurfBtx8RLOt-mzHu4'

    def test_hash_file(self, tmpdir):
        """hash_file() returns a hash object plus the byte length read."""
        self.prep(tmpdir)
        h, length = hash_file(self.test_file)
        assert length == self.test_file_len
        assert h.hexdigest() == self.test_file_hash

    def test_rehash(self, tmpdir):
        """rehash() returns the RECORD-encoded digest and length as str."""
        self.prep(tmpdir)
        h, length = wheel.rehash(self.test_file)
        assert length == str(self.test_file_len)
        assert h == self.test_file_hash_encoded
|