2013-05-31 00:43:58 +02:00
|
|
|
import hashlib
|
2019-08-20 19:42:10 +02:00
|
|
|
import logging
|
2013-05-28 23:58:08 +02:00
|
|
|
import os
|
2019-08-21 11:19:02 +02:00
|
|
|
import shutil
|
2019-02-11 22:35:30 +01:00
|
|
|
import sys
|
2014-06-06 20:21:51 +02:00
|
|
|
from io import BytesIO
|
2017-05-16 12:16:30 +02:00
|
|
|
from shutil import copy, rmtree
|
2013-05-28 23:58:08 +02:00
|
|
|
from tempfile import mkdtemp
|
|
|
|
|
2017-06-13 14:17:00 +02:00
|
|
|
import pytest
|
|
|
|
from mock import Mock, patch
|
2019-09-15 22:47:00 +02:00
|
|
|
from pip._vendor.cachecontrol.caches import FileCache
|
2014-09-12 00:40:45 +02:00
|
|
|
|
2013-05-31 00:43:58 +02:00
|
|
|
import pip
|
2017-08-31 17:48:18 +02:00
|
|
|
from pip._internal.download import (
|
2019-07-22 06:45:27 +02:00
|
|
|
CI_ENVIRONMENT_VARIABLES,
|
|
|
|
PipSession,
|
|
|
|
SafeFileCache,
|
2019-08-21 11:19:02 +02:00
|
|
|
_copy_source_tree,
|
2019-07-22 06:45:27 +02:00
|
|
|
_download_http_url,
|
|
|
|
parse_content_disposition,
|
|
|
|
sanitize_content_filename,
|
|
|
|
unpack_file_url,
|
|
|
|
unpack_http_url,
|
2014-04-24 13:29:57 +02:00
|
|
|
)
|
2017-08-31 17:48:18 +02:00
|
|
|
from pip._internal.exceptions import HashMismatch
|
2018-08-16 10:41:02 +02:00
|
|
|
from pip._internal.models.link import Link
|
2017-08-31 17:48:18 +02:00
|
|
|
from pip._internal.utils.hashes import Hashes
|
2019-06-03 11:31:40 +02:00
|
|
|
from pip._internal.utils.misc import path_to_url
|
2017-06-05 13:48:23 +02:00
|
|
|
from tests.lib import create_file
|
2019-08-21 11:19:02 +02:00
|
|
|
from tests.lib.filesystem import (
|
|
|
|
get_filelist,
|
|
|
|
make_socket_file,
|
|
|
|
make_unreadable_file,
|
|
|
|
)
|
|
|
|
from tests.lib.path import Path
|
2013-05-28 23:58:08 +02:00
|
|
|
|
|
|
|
|
2019-05-04 23:11:44 +02:00
|
|
|
@pytest.fixture(scope="function")
def cache_tmpdir(tmpdir):
    """Yield a freshly created ``cache`` directory under the test tmpdir."""
    cache_path = tmpdir.joinpath("cache")
    cache_path.mkdir(parents=True)
    yield cache_path
|
|
|
|
|
|
|
|
|
2013-08-23 13:12:37 +02:00
|
|
|
def test_unpack_http_url_with_urllib_response_without_content_type(data):
    """
    It should download and unpack files even if no Content-Type header exists
    """
    real_session = PipSession()

    def fake_get(*args, **kwargs):
        # Strip the Content-Type header to simulate a minimal server.
        response = real_session.get(*args, **kwargs)
        del response.headers["Content-Type"]
        return response

    session = Mock()
    session.get = fake_get

    link = Link(path_to_url(data.packages.joinpath("simple-1.0.tar.gz")))
    temp_dir = mkdtemp()
    try:
        unpack_http_url(
            link,
            temp_dir,
            download_dir=None,
            session=session,
        )
        expected = {
            'PKG-INFO', 'setup.cfg', 'setup.py', 'simple', 'simple.egg-info'
        }
        assert set(os.listdir(temp_dir)) == expected
    finally:
        rmtree(temp_dir)
|
2013-05-28 23:58:08 +02:00
|
|
|
|
|
|
|
|
2019-02-18 08:03:51 +01:00
|
|
|
def get_user_agent():
    """Return the User-Agent header value of a default PipSession."""
    return PipSession().headers["User-Agent"]
|
|
|
|
|
|
|
|
|
2013-05-28 23:58:08 +02:00
|
|
|
def test_user_agent():
    """The User-Agent should identify pip and its exact version."""
    assert get_user_agent().startswith("pip/%s" % pip.__version__)
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.parametrize('name, expected_like_ci', [
    ('BUILD_BUILDID', True),
    ('BUILD_ID', True),
    ('CI', True),
    ('PIP_IS_CI', True),
    # Test a prefix substring of one of the variable names we use.
    ('BUILD', False),
])
def test_user_agent__ci(monkeypatch, name, expected_like_ci):
    """CI detection in the User-Agent should respond to environment vars."""
    # Clear every CI-related variable first so running under a real CI
    # machine can't make the baseline assertion fail.  Depending on
    # CI_ENVIRONMENT_VARIABLES (part of the code under test) here is okay
    # because it can only prevent false failures, never cause a false pass.
    for variable in CI_ENVIRONMENT_VARIABLES:
        monkeypatch.delenv(variable, raising=False)

    # Baseline: no CI marker present.
    agent = get_user_agent()
    assert '"ci":null' in agent
    assert '"ci":true' not in agent

    monkeypatch.setenv(name, 'true')
    agent = get_user_agent()
    assert ('"ci":true' in agent) == expected_like_ci
    assert ('"ci":null' in agent) == (not expected_like_ci)
|
2013-05-30 23:03:04 +02:00
|
|
|
|
|
|
|
|
2019-03-31 11:37:02 +02:00
|
|
|
def test_user_agent_user_data(monkeypatch):
    """PIP_USER_AGENT_USER_DATA should be embedded into the User-Agent."""
    monkeypatch.setenv("PIP_USER_AGENT_USER_DATA", "some_string")
    assert "some_string" in PipSession().headers["User-Agent"]
|
|
|
|
|
|
|
|
|
2014-01-07 15:58:11 +01:00
|
|
|
class FakeStream(object):
    """Minimal stand-in for a urllib3 response body wrapping raw bytes."""

    def __init__(self, contents):
        self._io = BytesIO(contents)

    def read(self, size, decode_content=None):
        # ``decode_content`` is accepted for API compatibility and ignored.
        return self._io.read(size)

    def stream(self, size, decode_content=None):
        # Yield a single chunk of at most ``size`` bytes per call.
        yield self._io.read(size)

    def release_conn(self):
        # No underlying connection to release.
        pass
|
|
|
|
|
2014-01-07 15:58:11 +01:00
|
|
|
|
|
|
|
class MockResponse(object):
    """Minimal stand-in for a ``requests.Response`` backed by FakeStream."""

    def __init__(self, contents):
        self.raw = FakeStream(contents)
        self.content = contents
        self.request = None
        self.status_code = 200
        self.connection = None
        self.url = None
        self.headers = {}
        self.history = []

    def raise_for_status(self):
        # Status 200 is always "ok" for these tests.
        pass
|
2013-05-31 00:43:58 +02:00
|
|
|
|
|
|
|
|
2019-02-12 17:52:49 +01:00
|
|
|
class MockConnection(object):
    """Transport-adapter double; tests override ``_send`` to supply responses."""

    def _send(self, req, **kwargs):
        raise NotImplementedError("_send must be overridden for tests")

    def send(self, req, **kwargs):
        # Delegate to the test-provided _send, then fire response hooks.
        response = self._send(req, **kwargs)
        for callback in req.hooks.get("response", []):
            callback(response)
        return response
|
|
|
|
|
|
|
|
|
|
|
|
class MockRequest(object):
    """Minimal stand-in for a ``requests.PreparedRequest``."""

    def __init__(self, url):
        self.url = url
        self.headers = {}
        self.hooks = {}

    def register_hook(self, event_name, callback):
        # Append to the callback list for the event, creating it on demand.
        self.hooks.setdefault(event_name, []).append(callback)
|
|
|
|
|
|
|
|
|
2017-08-31 17:48:18 +02:00
|
|
|
@patch('pip._internal.download.unpack_file')
def test_unpack_http_url_bad_downloaded_checksum(mock_unpack_file):
    """
    If already-downloaded file has bad checksum, re-download.
    """
    base_url = 'http://www.example.com/somepackage.tgz'
    contents = b'downloaded'
    download_hash = hashlib.new('sha1', contents)
    link = Link(base_url + '#sha1=' + download_hash.hexdigest())

    session = Mock()
    session.get = Mock()
    resp = session.get.return_value = MockResponse(contents)
    resp.headers = {'content-type': 'application/x-tar'}
    resp.url = base_url

    download_dir = mkdtemp()
    try:
        # Seed the download dir with a file whose contents don't match
        # the hash in the link fragment.
        downloaded_file = os.path.join(download_dir, 'somepackage.tgz')
        create_file(downloaded_file, 'some contents')

        unpack_http_url(
            link,
            'location',
            download_dir=download_dir,
            session=session,
            hashes=Hashes({'sha1': [download_hash.hexdigest()]})
        )

        # despite existence of downloaded file with bad hash, downloaded again
        session.get.assert_called_once_with(
            'http://www.example.com/somepackage.tgz',
            headers={"Accept-Encoding": "identity"},
            stream=True,
        )
        # cached file is replaced with newly downloaded file
        with open(downloaded_file) as fh:
            assert fh.read() == 'downloaded'
    finally:
        rmtree(download_dir)
|
2013-11-19 07:12:41 +01:00
|
|
|
|
|
|
|
|
2019-04-17 15:25:45 +02:00
|
|
|
@pytest.mark.parametrize("filename, expected", [
|
|
|
|
('dir/file', 'file'),
|
|
|
|
('../file', 'file'),
|
|
|
|
('../../file', 'file'),
|
|
|
|
('../', ''),
|
|
|
|
('../..', '..'),
|
|
|
|
('/', ''),
|
|
|
|
])
|
|
|
|
def test_sanitize_content_filename(filename, expected):
|
|
|
|
"""
|
|
|
|
Test inputs where the result is the same for Windows and non-Windows.
|
|
|
|
"""
|
|
|
|
assert sanitize_content_filename(filename) == expected
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.parametrize("filename, win_expected, non_win_expected", [
|
|
|
|
('dir\\file', 'file', 'dir\\file'),
|
|
|
|
('..\\file', 'file', '..\\file'),
|
|
|
|
('..\\..\\file', 'file', '..\\..\\file'),
|
|
|
|
('..\\', '', '..\\'),
|
|
|
|
('..\\..', '..', '..\\..'),
|
|
|
|
('\\', '', '\\'),
|
|
|
|
])
|
|
|
|
def test_sanitize_content_filename__platform_dependent(
|
|
|
|
filename,
|
|
|
|
win_expected,
|
|
|
|
non_win_expected
|
|
|
|
):
|
|
|
|
"""
|
|
|
|
Test inputs where the result is different for Windows and non-Windows.
|
|
|
|
"""
|
|
|
|
if sys.platform == 'win32':
|
|
|
|
expected = win_expected
|
|
|
|
else:
|
|
|
|
expected = non_win_expected
|
|
|
|
assert sanitize_content_filename(filename) == expected
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.parametrize("content_disposition, default_filename, expected", [
|
|
|
|
('attachment;filename="../file"', 'df', 'file'),
|
|
|
|
])
|
|
|
|
def test_parse_content_disposition(
|
|
|
|
content_disposition,
|
|
|
|
default_filename,
|
|
|
|
expected
|
|
|
|
):
|
|
|
|
actual = parse_content_disposition(content_disposition, default_filename)
|
|
|
|
assert actual == expected
|
|
|
|
|
|
|
|
|
|
|
|
def test_download_http_url__no_directory_traversal(tmpdir):
    """
    Test that directory traversal doesn't happen on download when the
    Content-Disposition header contains a filename with a ".." path part.
    """
    mock_url = 'http://www.example.com/whatever.tgz'
    contents = b'downloaded'
    link = Link(mock_url)

    resp = MockResponse(contents)
    resp.url = mock_url
    resp.headers = {
        # Set the content-type to a random value to prevent
        # mimetypes.guess_extension from guessing the extension.
        'content-type': 'random',
        'content-disposition': 'attachment;filename="../out_dir_file"'
    }
    session = Mock()
    session.get.return_value = resp

    download_dir = tmpdir.joinpath('download')
    os.mkdir(download_dir)
    file_path, content_type = _download_http_url(
        link,
        session,
        download_dir,
        hashes=None,
        progress_bar='on',
    )
    # The file should be downloaded to download_dir.
    assert os.listdir(download_dir) == ['out_dir_file']
|
|
|
|
|
|
|
|
|
2019-08-21 11:19:02 +02:00
|
|
|
@pytest.fixture
def clean_project(tmpdir_factory, data):
    """Copy the FSPkg test package into a private tmpdir and return its path."""
    base = Path(str(tmpdir_factory.mktemp("clean_project")))
    project_dir = base.joinpath("FSPkg")
    shutil.copytree(data.packages.joinpath("FSPkg"), project_dir)
    return project_dir
|
|
|
|
|
|
|
|
|
|
|
|
def test_copy_source_tree(clean_project, tmpdir):
    """_copy_source_tree should reproduce the source's full file list."""
    target = tmpdir.joinpath("target")
    expected_files = get_filelist(clean_project)
    # Sanity check on the fixture's contents.
    assert len(expected_files) == 3

    _copy_source_tree(clean_project, target)

    assert get_filelist(target) == expected_files
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.skipif("sys.platform == 'win32' or sys.version_info < (3,)")
|
|
|
|
def test_copy_source_tree_with_socket(clean_project, tmpdir, caplog):
|
|
|
|
target = tmpdir.joinpath("target")
|
|
|
|
expected_files = get_filelist(clean_project)
|
|
|
|
socket_path = str(clean_project.joinpath("aaa"))
|
|
|
|
make_socket_file(socket_path)
|
|
|
|
|
|
|
|
_copy_source_tree(clean_project, target)
|
|
|
|
|
|
|
|
copied_files = get_filelist(target)
|
|
|
|
assert expected_files == copied_files
|
|
|
|
|
|
|
|
# Warning should have been logged.
|
|
|
|
assert len(caplog.records) == 1
|
|
|
|
record = caplog.records[0]
|
|
|
|
assert record.levelname == 'WARNING'
|
|
|
|
assert socket_path in record.message
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.skipif("sys.platform == 'win32' or sys.version_info < (3,)")
|
|
|
|
def test_copy_source_tree_with_socket_fails_with_no_socket_error(
|
|
|
|
clean_project, tmpdir
|
|
|
|
):
|
|
|
|
target = tmpdir.joinpath("target")
|
|
|
|
expected_files = get_filelist(clean_project)
|
|
|
|
make_socket_file(clean_project.joinpath("aaa"))
|
|
|
|
unreadable_file = clean_project.joinpath("bbb")
|
|
|
|
make_unreadable_file(unreadable_file)
|
|
|
|
|
|
|
|
with pytest.raises(shutil.Error) as e:
|
|
|
|
_copy_source_tree(clean_project, target)
|
|
|
|
|
|
|
|
errored_files = [err[0] for err in e.value.args[0]]
|
|
|
|
assert len(errored_files) == 1
|
|
|
|
assert unreadable_file in errored_files
|
|
|
|
|
|
|
|
copied_files = get_filelist(target)
|
|
|
|
# All files without errors should have been copied.
|
|
|
|
assert expected_files == copied_files
|
|
|
|
|
|
|
|
|
|
|
|
def test_copy_source_tree_with_unreadable_dir_fails(clean_project, tmpdir):
    """An unreadable entry raises shutil.Error but doesn't abort the copy."""
    target = tmpdir.joinpath("target")
    expected_files = get_filelist(clean_project)
    unreadable_file = clean_project.joinpath("bbb")
    make_unreadable_file(unreadable_file)

    with pytest.raises(shutil.Error) as e:
        _copy_source_tree(clean_project, target)

    # Only the unreadable file should appear in the error list.
    errored_files = [err[0] for err in e.value.args[0]]
    assert len(errored_files) == 1
    assert unreadable_file in errored_files

    # All files without errors should have been copied.
    assert get_filelist(target) == expected_files
|
|
|
|
|
|
|
|
|
2014-02-01 20:41:55 +01:00
|
|
|
class Test_unpack_file_url(object):
    """Tests for unpack_file_url: unpacking, download caching and hashes."""

    def prep(self, tmpdir, data):
        # Common setup: a build dir, a download dir and two sample dists.
        self.build_dir = tmpdir.joinpath('build')
        self.download_dir = tmpdir.joinpath('download')
        os.mkdir(self.build_dir)
        os.mkdir(self.download_dir)
        self.dist_file = "simple-1.0.tar.gz"
        self.dist_file2 = "simple-2.0.tar.gz"
        self.dist_path = data.packages.joinpath(self.dist_file)
        self.dist_path2 = data.packages.joinpath(self.dist_file2)
        self.dist_url = Link(path_to_url(self.dist_path))
        self.dist_url2 = Link(path_to_url(self.dist_path2))

    def _md5(self, path):
        # Helper: hex md5 digest of a file's contents.
        with open(path, 'rb') as f:
            return hashlib.md5(f.read()).hexdigest()

    def test_unpack_file_url_no_download(self, tmpdir, data):
        self.prep(tmpdir, data)
        unpack_file_url(self.dist_url, self.build_dir)
        assert os.path.isdir(os.path.join(self.build_dir, 'simple'))
        assert not os.path.isfile(
            os.path.join(self.download_dir, self.dist_file))

    def test_unpack_file_url_and_download(self, tmpdir, data):
        self.prep(tmpdir, data)
        unpack_file_url(self.dist_url, self.build_dir,
                        download_dir=self.download_dir)
        assert os.path.isdir(os.path.join(self.build_dir, 'simple'))
        assert os.path.isfile(os.path.join(self.download_dir, self.dist_file))

    def test_unpack_file_url_download_already_exists(self, tmpdir,
                                                     data, monkeypatch):
        self.prep(tmpdir, data)
        # add in previous download (copy simple-2.0 as simple-1.0)
        # so we can tell it didn't get overwritten
        dest_file = os.path.join(self.download_dir, self.dist_file)
        copy(self.dist_path2, dest_file)
        dist_path2_md5 = self._md5(self.dist_path2)

        unpack_file_url(self.dist_url, self.build_dir,
                        download_dir=self.download_dir)
        # our hash should be the same, i.e. not overwritten by simple-1.0 hash
        assert dist_path2_md5 == self._md5(dest_file)

    def test_unpack_file_url_bad_hash(self, tmpdir, data,
                                      monkeypatch):
        """
        Test when the file url hash fragment is wrong
        """
        self.prep(tmpdir, data)
        dist_url = Link('{}#md5=bogus'.format(self.dist_url.url))
        with pytest.raises(HashMismatch):
            unpack_file_url(dist_url,
                            self.build_dir,
                            hashes=Hashes({'md5': ['bogus']}))

    def test_unpack_file_url_download_bad_hash(self, tmpdir, data,
                                               monkeypatch):
        """
        Test when existing download has different hash from the file url
        fragment
        """
        self.prep(tmpdir, data)

        # add in previous download (copy simple-2.0 as simple-1.0 so it's wrong
        # hash)
        dest_file = os.path.join(self.download_dir, self.dist_file)
        copy(self.dist_path2, dest_file)

        dist_path_md5 = self._md5(self.dist_path)
        assert dist_path_md5 != self._md5(dest_file)

        dist_url = Link('{}#md5={}'.format(self.dist_url.url, dist_path_md5))
        unpack_file_url(dist_url, self.build_dir,
                        download_dir=self.download_dir,
                        hashes=Hashes({'md5': [dist_path_md5]}))

        # confirm hash is for simple1-1.0
        # the previous bad download has been removed
        assert self._md5(dest_file) == dist_path_md5

    def test_unpack_file_url_thats_a_dir(self, tmpdir, data):
        self.prep(tmpdir, data)
        dist_path = data.packages.joinpath("FSPkg")
        dist_url = Link(path_to_url(dist_path))
        unpack_file_url(dist_url, self.build_dir,
                        download_dir=self.download_dir)
        assert os.path.isdir(os.path.join(self.build_dir, 'fspkg'))
|
2014-04-24 13:29:57 +02:00
|
|
|
|
|
|
|
|
2019-08-03 03:17:53 +02:00
|
|
|
@pytest.mark.parametrize('exclude_dir', [
    '.nox',
    '.tox'
])
def test_unpack_file_url_excludes_expected_dirs(tmpdir, exclude_dir):
    """Top-level .nox/.tox dirs are excluded; nested same-named dirs remain."""
    src_dir = tmpdir / 'src'
    dst_dir = tmpdir / 'dst'
    src_included_file = src_dir.joinpath('file.txt')
    src_excluded_dir = src_dir.joinpath(exclude_dir)
    src_excluded_file = src_dir.joinpath(exclude_dir, 'file.txt')
    src_included_dir = src_dir.joinpath('subdir', exclude_dir)

    # set up source directory
    src_excluded_dir.mkdir(parents=True)
    src_included_dir.mkdir(parents=True)
    src_included_file.touch()
    src_excluded_file.touch()

    dst_included_file = dst_dir.joinpath('file.txt')
    dst_excluded_dir = dst_dir.joinpath(exclude_dir)
    dst_excluded_file = dst_dir.joinpath(exclude_dir, 'file.txt')
    dst_included_dir = dst_dir.joinpath('subdir', exclude_dir)

    unpack_file_url(
        Link(path_to_url(src_dir)),
        dst_dir,
        download_dir=None
    )
    assert not os.path.isdir(dst_excluded_dir)
    assert not os.path.isfile(dst_excluded_file)
    assert os.path.isfile(dst_included_file)
    assert os.path.isdir(dst_included_dir)
|
2019-08-03 03:17:53 +02:00
|
|
|
|
|
|
|
|
2014-04-24 13:29:57 +02:00
|
|
|
class TestSafeFileCache:
    """
    The no_perms tests are useless on Windows since SafeFileCache uses
    pip._internal.utils.filesystem.check_path_owner which is based on
    os.geteuid which is absent on Windows.
    """

    def test_cache_roundtrip(self, cache_tmpdir):
        # set/get/delete should round-trip cleanly.
        cache = SafeFileCache(cache_tmpdir)
        assert cache.get("test key") is None
        cache.set("test key", b"a test string")
        assert cache.get("test key") == b"a test string"
        cache.delete("test key")
        assert cache.get("test key") is None

    @pytest.mark.skipif("sys.platform == 'win32'")
    def test_safe_get_no_perms(self, cache_tmpdir, monkeypatch):
        os.chmod(cache_tmpdir, 000)
        monkeypatch.setattr(os.path, "exists", lambda x: True)
        # Must not raise despite the unreadable directory.
        SafeFileCache(cache_tmpdir).get("foo")

    @pytest.mark.skipif("sys.platform == 'win32'")
    def test_safe_set_no_perms(self, cache_tmpdir):
        os.chmod(cache_tmpdir, 000)
        # Must not raise despite the unwritable directory.
        SafeFileCache(cache_tmpdir).set("foo", b"bar")

    @pytest.mark.skipif("sys.platform == 'win32'")
    def test_safe_delete_no_perms(self, cache_tmpdir):
        os.chmod(cache_tmpdir, 000)
        # Must not raise despite the unwritable directory.
        SafeFileCache(cache_tmpdir).delete("foo")

    def test_cache_hashes_are_same(self, cache_tmpdir):
        # SafeFileCache must hash keys to the same paths as FileCache.
        cache = SafeFileCache(cache_tmpdir)
        key = "test key"
        fake_cache = Mock(
            FileCache, directory=cache.directory, encode=FileCache.encode
        )
        assert cache._get_cache_path(key) == FileCache._fn(fake_cache, key)
|
2019-09-15 22:47:00 +02:00
|
|
|
|
2014-04-24 13:29:57 +02:00
|
|
|
|
|
|
|
class TestPipSession:
    """Tests for PipSession caching behavior and trusted-host handling."""

    def test_cache_defaults_off(self):
        # Without a cache path, neither adapter should be caching.
        session = PipSession()
        assert not hasattr(session.adapters["http://"], "cache")
        assert not hasattr(session.adapters["https://"], "cache")

    def test_cache_is_enabled(self, tmpdir):
        cache_directory = tmpdir.joinpath("test-cache")
        session = PipSession(cache=cache_directory)
        assert hasattr(session.adapters["https://"], "cache")
        assert session.adapters["https://"].cache.directory == cache_directory

    def test_http_cache_is_not_enabled(self, tmpdir):
        # Plain http responses must never be cached.
        session = PipSession(cache=tmpdir.joinpath("test-cache"))
        assert not hasattr(session.adapters["http://"], "cache")

    def test_insecure_host_adapter(self, tmpdir):
        session = PipSession(
            cache=tmpdir.joinpath("test-cache"),
            trusted_hosts=["example.com"],
        )
        assert "https://example.com/" in session.adapters
        # Check that the "port wildcard" is present.
        assert "https://example.com:" in session.adapters
        # Check that the cache isn't enabled.
        assert not hasattr(session.adapters["https://example.com/"], "cache")

    def test_add_trusted_host(self):
        # Leave a gap to test how the ordering is affected.
        session = PipSession(trusted_hosts=['host1', 'host3'])
        insecure_adapter = session._insecure_adapter
        prefix2 = 'https://host2/'
        prefix3 = 'https://host3/'
        prefix3_wildcard = 'https://host3:'

        # Confirm some initial conditions as a baseline.
        assert session.pip_trusted_origins == [
            ('host1', None), ('host3', None)
        ]
        assert session.adapters[prefix3] is insecure_adapter
        assert session.adapters[prefix3_wildcard] is insecure_adapter
        assert prefix2 not in session.adapters

        # Test adding a new host.
        session.add_trusted_host('host2')
        assert session.pip_trusted_origins == [
            ('host1', None), ('host3', None), ('host2', None)
        ]
        # Check that prefix3 is still present.
        assert session.adapters[prefix3] is insecure_adapter
        assert session.adapters[prefix2] is insecure_adapter

        # Test that adding the same host doesn't create a duplicate.
        session.add_trusted_host('host3')
        assert session.pip_trusted_origins == [
            ('host1', None), ('host3', None), ('host2', None)
        ], 'actual: {}'.format(session.pip_trusted_origins)

        # A host with an explicit port gets its own adapter prefix.
        session.add_trusted_host('host4:8080')
        prefix4 = 'https://host4:8080/'
        assert session.pip_trusted_origins == [
            ('host1', None), ('host3', None),
            ('host2', None), ('host4', 8080)
        ]
        assert session.adapters[prefix4] is insecure_adapter
|
2019-08-20 19:42:10 +02:00
|
|
|
|
|
|
|
def test_add_trusted_host__logging(self, caplog):
|
|
|
|
"""
|
|
|
|
Test logging when add_trusted_host() is called.
|
|
|
|
"""
|
|
|
|
trusted_hosts = ['host0', 'host1']
|
2019-08-20 21:58:31 +02:00
|
|
|
session = PipSession(trusted_hosts=trusted_hosts)
|
2019-08-20 19:42:10 +02:00
|
|
|
with caplog.at_level(logging.INFO):
|
|
|
|
# Test adding an existing host.
|
|
|
|
session.add_trusted_host('host1', source='somewhere')
|
|
|
|
session.add_trusted_host('host2')
|
|
|
|
# Test calling add_trusted_host() on the same host twice.
|
|
|
|
session.add_trusted_host('host2')
|
|
|
|
|
|
|
|
actual = [(r.levelname, r.message) for r in caplog.records]
|
|
|
|
# Observe that "host0" isn't included in the logs.
|
|
|
|
expected = [
|
|
|
|
('INFO', "adding trusted host: 'host1' (from somewhere)"),
|
|
|
|
('INFO', "adding trusted host: 'host2'"),
|
|
|
|
('INFO', "adding trusted host: 'host2'"),
|
|
|
|
]
|
|
|
|
assert actual == expected
|
|
|
|
|
|
|
|
def test_iter_secure_origins(self):
|
2019-08-26 01:26:01 +02:00
|
|
|
trusted_hosts = ['host1', 'host2', 'host3:8080']
|
2019-08-20 21:58:31 +02:00
|
|
|
session = PipSession(trusted_hosts=trusted_hosts)
|
2019-08-20 19:42:10 +02:00
|
|
|
|
|
|
|
actual = list(session.iter_secure_origins())
|
2019-08-26 01:26:01 +02:00
|
|
|
assert len(actual) == 9
|
2019-08-20 19:42:10 +02:00
|
|
|
# Spot-check that SECURE_ORIGINS is included.
|
|
|
|
assert actual[0] == ('https', '*', '*')
|
2019-08-26 01:26:01 +02:00
|
|
|
assert actual[-3:] == [
|
2019-08-20 19:42:10 +02:00
|
|
|
('*', 'host1', '*'),
|
|
|
|
('*', 'host2', '*'),
|
2019-08-26 01:26:01 +02:00
|
|
|
('*', 'host3', 8080)
|
2019-08-20 19:42:10 +02:00
|
|
|
]
|
|
|
|
|
2019-08-20 21:58:31 +02:00
|
|
|
def test_iter_secure_origins__trusted_hosts_empty(self):
|
2019-08-20 19:42:10 +02:00
|
|
|
"""
|
2019-08-20 21:58:31 +02:00
|
|
|
Test iter_secure_origins() after passing trusted_hosts=[].
|
2019-08-20 19:42:10 +02:00
|
|
|
"""
|
2019-08-20 21:58:31 +02:00
|
|
|
session = PipSession(trusted_hosts=[])
|
2019-08-20 19:42:10 +02:00
|
|
|
|
|
|
|
actual = list(session.iter_secure_origins())
|
|
|
|
assert len(actual) == 6
|
|
|
|
# Spot-check that SECURE_ORIGINS is included.
|
|
|
|
assert actual[0] == ('https', '*', '*')
|
|
|
|
|
|
|
|
@pytest.mark.parametrize(
|
2019-08-20 19:51:28 +02:00
|
|
|
'location, trusted, expected',
|
2019-08-20 19:42:10 +02:00
|
|
|
[
|
2019-08-20 19:51:28 +02:00
|
|
|
("http://pypi.org/something", [], False),
|
|
|
|
("https://pypi.org/something", [], True),
|
|
|
|
("git+http://pypi.org/something", [], False),
|
|
|
|
("git+https://pypi.org/something", [], True),
|
|
|
|
("git+ssh://git@pypi.org/something", [], True),
|
|
|
|
("http://localhost", [], True),
|
|
|
|
("http://127.0.0.1", [], True),
|
|
|
|
("http://example.com/something/", [], False),
|
|
|
|
("http://example.com/something/", ["example.com"], True),
|
|
|
|
# Try changing the case.
|
|
|
|
("http://eXample.com/something/", ["example.cOm"], True),
|
2019-08-26 01:26:01 +02:00
|
|
|
# Test hosts with port.
|
|
|
|
("http://example.com:8080/something/", ["example.com"], True),
|
|
|
|
# Test a trusted_host with a port.
|
|
|
|
("http://example.com:8080/something/", ["example.com:8080"], True),
|
|
|
|
("http://example.com/something/", ["example.com:8080"], False),
|
|
|
|
(
|
|
|
|
"http://example.com:8888/something/",
|
|
|
|
["example.com:8080"],
|
|
|
|
False
|
|
|
|
),
|
2019-08-20 19:42:10 +02:00
|
|
|
],
|
|
|
|
)
|
2019-08-20 19:51:28 +02:00
|
|
|
def test_is_secure_origin(self, caplog, location, trusted, expected):
|
2019-08-20 19:42:10 +02:00
|
|
|
class MockLogger(object):
|
|
|
|
def __init__(self):
|
|
|
|
self.called = False
|
|
|
|
|
|
|
|
def warning(self, *args, **kwargs):
|
|
|
|
self.called = True
|
|
|
|
|
2019-08-20 21:58:31 +02:00
|
|
|
session = PipSession(trusted_hosts=trusted)
|
2019-08-20 19:51:28 +02:00
|
|
|
actual = session.is_secure_origin(location)
|
|
|
|
assert actual == expected
|
|
|
|
|
|
|
|
log_records = [(r.levelname, r.message) for r in caplog.records]
|
|
|
|
if expected:
|
|
|
|
assert not log_records
|
|
|
|
return
|
|
|
|
|
|
|
|
assert len(log_records) == 1
|
|
|
|
actual_level, actual_message = log_records[0]
|
|
|
|
assert actual_level == 'WARNING'
|
|
|
|
assert 'is not a trusted or secure host' in actual_message
|