import os
import shutil
from shutil import rmtree
from tempfile import TemporaryDirectory, mkdtemp

import pytest
from mock import Mock

from pip._internal.exceptions import HashMismatch
from pip._internal.models.link import Link
from pip._internal.network.download import Downloader
from pip._internal.network.session import PipSession
from pip._internal.operations.prepare import _download_http_url, unpack_url
from pip._internal.utils.hashes import Hashes
from pip._internal.utils.urls import path_to_url

from tests.lib.path import Path
from tests.lib.requests_mocks import MockResponse

2020-02-05 03:25:30 +01:00
|
|
|
def test_unpack_url_with_urllib_response_without_content_type(data):
|
2013-05-28 23:58:08 +02:00
|
|
|
"""
|
|
|
|
It should download and unpack files even if no Content-Type header exists
|
|
|
|
"""
|
2013-08-16 14:04:27 +02:00
|
|
|
_real_session = PipSession()
|
|
|
|
|
|
|
|
def _fake_session_get(*args, **kwargs):
|
|
|
|
resp = _real_session.get(*args, **kwargs)
|
|
|
|
del resp.headers["Content-Type"]
|
2013-05-28 23:58:08 +02:00
|
|
|
return resp
|
|
|
|
|
2013-08-16 14:04:27 +02:00
|
|
|
session = Mock()
|
|
|
|
session.get = _fake_session_get
|
2019-11-18 01:31:20 +01:00
|
|
|
downloader = Downloader(session, progress_bar="on")
|
2013-08-16 14:04:27 +02:00
|
|
|
|
2019-07-02 07:00:32 +02:00
|
|
|
uri = path_to_url(data.packages.joinpath("simple-1.0.tar.gz"))
|
2013-08-16 14:04:27 +02:00
|
|
|
link = Link(uri)
|
|
|
|
temp_dir = mkdtemp()
|
|
|
|
try:
|
2020-02-05 03:25:30 +01:00
|
|
|
unpack_url(
|
2014-01-28 15:17:51 +01:00
|
|
|
link,
|
|
|
|
temp_dir,
|
2019-11-18 01:31:20 +01:00
|
|
|
downloader=downloader,
|
2019-11-12 04:34:52 +01:00
|
|
|
download_dir=None,
|
2013-08-16 14:04:27 +02:00
|
|
|
)
|
2017-12-15 06:56:04 +01:00
|
|
|
assert set(os.listdir(temp_dir)) == {
|
2014-01-28 15:17:51 +01:00
|
|
|
'PKG-INFO', 'setup.cfg', 'setup.py', 'simple', 'simple.egg-info'
|
2017-12-15 06:56:04 +01:00
|
|
|
}
|
2013-08-16 14:04:27 +02:00
|
|
|
finally:
|
|
|
|
rmtree(temp_dir)
|
2013-05-28 23:58:08 +02:00
|
|
|
|
|
|
|
|
2019-04-17 15:25:45 +02:00
|
|
|
def test_download_http_url__no_directory_traversal(tmpdir):
|
|
|
|
"""
|
|
|
|
Test that directory traversal doesn't happen on download when the
|
|
|
|
Content-Disposition header contains a filename with a ".." path part.
|
|
|
|
"""
|
|
|
|
mock_url = 'http://www.example.com/whatever.tgz'
|
|
|
|
contents = b'downloaded'
|
|
|
|
link = Link(mock_url)
|
|
|
|
|
|
|
|
session = Mock()
|
|
|
|
resp = MockResponse(contents)
|
|
|
|
resp.url = mock_url
|
|
|
|
resp.headers = {
|
|
|
|
# Set the content-type to a random value to prevent
|
|
|
|
# mimetypes.guess_extension from guessing the extension.
|
|
|
|
'content-type': 'random',
|
|
|
|
'content-disposition': 'attachment;filename="../out_dir_file"'
|
|
|
|
}
|
|
|
|
session.get.return_value = resp
|
2019-11-18 01:28:08 +01:00
|
|
|
downloader = Downloader(session, progress_bar="on")
|
2019-04-17 15:25:45 +02:00
|
|
|
|
2019-07-02 07:00:32 +02:00
|
|
|
download_dir = tmpdir.joinpath('download')
|
2019-04-17 15:25:45 +02:00
|
|
|
os.mkdir(download_dir)
|
|
|
|
file_path, content_type = _download_http_url(
|
|
|
|
link,
|
2019-11-18 01:28:08 +01:00
|
|
|
downloader,
|
2019-04-17 15:25:45 +02:00
|
|
|
download_dir,
|
|
|
|
hashes=None,
|
|
|
|
)
|
|
|
|
# The file should be downloaded to download_dir.
|
|
|
|
actual = os.listdir(download_dir)
|
|
|
|
assert actual == ['out_dir_file']
|
|
|
|
|
|
|
|
|
2019-08-21 11:19:02 +02:00
|
|
|
@pytest.fixture
|
|
|
|
def clean_project(tmpdir_factory, data):
|
|
|
|
tmpdir = Path(str(tmpdir_factory.mktemp("clean_project")))
|
|
|
|
new_project_dir = tmpdir.joinpath("FSPkg")
|
|
|
|
path = data.packages.joinpath("FSPkg")
|
|
|
|
shutil.copytree(path, new_project_dir)
|
|
|
|
return new_project_dir
|
|
|
|
|
|
|
|
|
2020-02-05 03:25:30 +01:00
|
|
|
class Test_unpack_url(object):
|
2014-02-01 20:41:55 +01:00
|
|
|
|
|
|
|
def prep(self, tmpdir, data):
|
2019-07-02 07:00:32 +02:00
|
|
|
self.build_dir = tmpdir.joinpath('build')
|
|
|
|
self.download_dir = tmpdir.joinpath('download')
|
2014-02-01 20:41:55 +01:00
|
|
|
os.mkdir(self.build_dir)
|
|
|
|
os.mkdir(self.download_dir)
|
|
|
|
self.dist_file = "simple-1.0.tar.gz"
|
|
|
|
self.dist_file2 = "simple-2.0.tar.gz"
|
2019-07-02 07:00:32 +02:00
|
|
|
self.dist_path = data.packages.joinpath(self.dist_file)
|
|
|
|
self.dist_path2 = data.packages.joinpath(self.dist_file2)
|
2014-02-01 20:41:55 +01:00
|
|
|
self.dist_url = Link(path_to_url(self.dist_path))
|
|
|
|
self.dist_url2 = Link(path_to_url(self.dist_path2))
|
2020-02-05 03:25:30 +01:00
|
|
|
self.no_downloader = Mock(side_effect=AssertionError)
|
2014-02-01 20:41:55 +01:00
|
|
|
|
2020-02-05 03:25:30 +01:00
|
|
|
def test_unpack_url_no_download(self, tmpdir, data):
|
2014-02-01 20:41:55 +01:00
|
|
|
self.prep(tmpdir, data)
|
2020-02-05 03:25:30 +01:00
|
|
|
unpack_url(self.dist_url, self.build_dir, self.no_downloader)
|
2014-02-01 20:41:55 +01:00
|
|
|
assert os.path.isdir(os.path.join(self.build_dir, 'simple'))
|
|
|
|
assert not os.path.isfile(
|
|
|
|
os.path.join(self.download_dir, self.dist_file))
|
|
|
|
|
2020-02-05 03:25:30 +01:00
|
|
|
def test_unpack_url_bad_hash(self, tmpdir, data,
|
|
|
|
monkeypatch):
|
2014-02-01 23:04:58 +01:00
|
|
|
"""
|
|
|
|
Test when the file url hash fragment is wrong
|
|
|
|
"""
|
|
|
|
self.prep(tmpdir, data)
|
2019-06-23 01:02:42 +02:00
|
|
|
url = '{}#md5=bogus'.format(self.dist_url.url)
|
|
|
|
dist_url = Link(url)
|
2014-02-01 23:04:58 +01:00
|
|
|
with pytest.raises(HashMismatch):
|
2020-02-05 03:25:30 +01:00
|
|
|
unpack_url(dist_url,
|
|
|
|
self.build_dir,
|
|
|
|
downloader=self.no_downloader,
|
|
|
|
hashes=Hashes({'md5': ['bogus']}))
|
2014-02-01 23:04:58 +01:00
|
|
|
|
2020-02-05 03:25:30 +01:00
|
|
|
def test_unpack_url_thats_a_dir(self, tmpdir, data):
|
2014-02-01 20:41:55 +01:00
|
|
|
self.prep(tmpdir, data)
|
2019-07-02 07:00:32 +02:00
|
|
|
dist_path = data.packages.joinpath("FSPkg")
|
2014-02-01 20:41:55 +01:00
|
|
|
dist_url = Link(path_to_url(dist_path))
|
2020-02-05 03:25:30 +01:00
|
|
|
unpack_url(dist_url, self.build_dir,
|
|
|
|
downloader=self.no_downloader,
|
|
|
|
download_dir=self.download_dir)
|
2020-03-22 14:18:11 +01:00
|
|
|
# test that nothing was copied to build_dir since we build in place
|
|
|
|
assert not os.path.exists(os.path.join(self.build_dir, 'fspkg'))
|