2016-02-12 23:41:21 +01:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
|
2012-07-09 01:16:26 +02:00
|
|
|
"""
|
|
|
|
util tests
|
|
|
|
|
|
|
|
"""
|
|
|
|
import os
|
2017-05-16 12:16:30 +02:00
|
|
|
import shutil
|
2013-08-17 09:18:40 +02:00
|
|
|
import stat
|
2013-05-28 23:58:08 +02:00
|
|
|
import sys
|
2013-08-17 09:18:40 +02:00
|
|
|
import tempfile
|
2017-05-16 12:16:30 +02:00
|
|
|
import time
|
2016-07-10 05:17:04 +02:00
|
|
|
import warnings
|
2013-05-28 23:58:08 +02:00
|
|
|
|
2017-05-16 12:16:30 +02:00
|
|
|
import pytest
|
2012-12-16 09:24:34 +01:00
|
|
|
from mock import Mock, patch
|
2017-06-13 14:17:00 +02:00
|
|
|
from pip._vendor.six import BytesIO
|
|
|
|
|
2017-08-31 17:48:18 +02:00
|
|
|
from pip._internal.exceptions import (
|
2017-11-21 08:50:32 +01:00
|
|
|
HashMismatch, HashMissing, InstallationError, UnsupportedPythonVersion,
|
2017-05-16 12:16:30 +02:00
|
|
|
)
|
2017-08-31 17:48:18 +02:00
|
|
|
from pip._internal.utils.encoding import auto_decode
|
|
|
|
from pip._internal.utils.glibc import check_glibc_version
|
|
|
|
from pip._internal.utils.hashes import Hashes, MissingHashes
|
|
|
|
from pip._internal.utils.misc import (
|
2018-01-21 12:25:41 +01:00
|
|
|
call_subprocess, egg_link_path, ensure_dir, get_installed_distributions,
|
2018-09-24 23:53:39 +02:00
|
|
|
get_prog, make_vcs_requirement_url, normalize_path, remove_auth_from_url,
|
|
|
|
rmtree, split_auth_from_netloc, untar_file, unzip_file,
|
2017-05-16 12:16:30 +02:00
|
|
|
)
|
2017-08-31 17:48:18 +02:00
|
|
|
from pip._internal.utils.packaging import check_dist_requires_python
|
|
|
|
from pip._internal.utils.temp_dir import TempDirectory
|
2012-07-09 01:16:26 +02:00
|
|
|
|
|
|
|
|
|
|
|
class Tests_EgglinkPath:
    "util.egg_link_path() tests"

    def setup(self):
        # The fake distribution being looked up, plus the two egg-link
        # locations that egg_link_path() may probe for it.
        project = 'foo'
        self.mock_dist = Mock(project_name=project)
        self.site_packages = 'SITE_PACKAGES'
        self.user_site = 'USER_SITE'
        self.user_site_egglink = os.path.join(
            self.user_site,
            '%s.egg-link' % project
        )
        self.site_packages_egglink = os.path.join(
            self.site_packages,
            '%s.egg-link' % project,
        )

        # patches
        # Manually swap module-level globals in pip._internal.utils.misc
        # for mocks; teardown() restores the saved originals.
        from pip._internal.utils import misc as utils
        self.old_site_packages = utils.site_packages
        self.mock_site_packages = utils.site_packages = 'SITE_PACKAGES'
        self.old_running_under_virtualenv = utils.running_under_virtualenv
        self.mock_running_under_virtualenv = utils.running_under_virtualenv = \
            Mock()
        self.old_virtualenv_no_global = utils.virtualenv_no_global
        self.mock_virtualenv_no_global = utils.virtualenv_no_global = Mock()
        self.old_user_site = utils.user_site
        self.mock_user_site = utils.user_site = self.user_site
        # Also patch os.path.isfile so tests control which egg-link "exists".
        from os import path
        self.old_isfile = path.isfile
        self.mock_isfile = path.isfile = Mock()

    def teardown(self):
        # Restore every global that setup() replaced.
        from pip._internal.utils import misc as utils
        utils.site_packages = self.old_site_packages
        utils.running_under_virtualenv = self.old_running_under_virtualenv
        utils.virtualenv_no_global = self.old_virtualenv_no_global
        utils.user_site = self.old_user_site
        from os import path
        path.isfile = self.old_isfile

    def eggLinkInUserSite(self, egglink):
        # os.path.isfile side effect: only the user-site egg-link "exists".
        return egglink == self.user_site_egglink

    def eggLinkInSitePackages(self, egglink):
        # os.path.isfile side effect: only the site-packages egg-link
        # "exists".
        return egglink == self.site_packages_egglink

    # ####################### #
    # # egglink in usersite # #
    # ####################### #
    def test_egglink_in_usersite_notvenv(self):
        self.mock_virtualenv_no_global.return_value = False
        self.mock_running_under_virtualenv.return_value = False
        self.mock_isfile.side_effect = self.eggLinkInUserSite
        assert egg_link_path(self.mock_dist) == self.user_site_egglink

    def test_egglink_in_usersite_venv_noglobal(self):
        self.mock_virtualenv_no_global.return_value = True
        self.mock_running_under_virtualenv.return_value = True
        self.mock_isfile.side_effect = self.eggLinkInUserSite
        # Inside an isolated venv the user-site egg-link must be ignored.
        assert egg_link_path(self.mock_dist) is None

    def test_egglink_in_usersite_venv_global(self):
        self.mock_virtualenv_no_global.return_value = False
        self.mock_running_under_virtualenv.return_value = True
        self.mock_isfile.side_effect = self.eggLinkInUserSite
        assert egg_link_path(self.mock_dist) == self.user_site_egglink

    # ####################### #
    # # egglink in sitepkgs # #
    # ####################### #
    def test_egglink_in_sitepkgs_notvenv(self):
        self.mock_virtualenv_no_global.return_value = False
        self.mock_running_under_virtualenv.return_value = False
        self.mock_isfile.side_effect = self.eggLinkInSitePackages
        assert egg_link_path(self.mock_dist) == self.site_packages_egglink

    def test_egglink_in_sitepkgs_venv_noglobal(self):
        self.mock_virtualenv_no_global.return_value = True
        self.mock_running_under_virtualenv.return_value = True
        self.mock_isfile.side_effect = self.eggLinkInSitePackages
        assert egg_link_path(self.mock_dist) == self.site_packages_egglink

    def test_egglink_in_sitepkgs_venv_global(self):
        self.mock_virtualenv_no_global.return_value = False
        self.mock_running_under_virtualenv.return_value = True
        self.mock_isfile.side_effect = self.eggLinkInSitePackages
        assert egg_link_path(self.mock_dist) == self.site_packages_egglink

    # ################################## #
    # # egglink in usersite & sitepkgs # #
    # ################################## #
    def test_egglink_in_both_notvenv(self):
        self.mock_virtualenv_no_global.return_value = False
        self.mock_running_under_virtualenv.return_value = False
        # isfile returns True for every path: both egg-links "exist".
        self.mock_isfile.return_value = True
        assert egg_link_path(self.mock_dist) == self.user_site_egglink

    def test_egglink_in_both_venv_noglobal(self):
        self.mock_virtualenv_no_global.return_value = True
        self.mock_running_under_virtualenv.return_value = True
        self.mock_isfile.return_value = True
        assert egg_link_path(self.mock_dist) == self.site_packages_egglink

    def test_egglink_in_both_venv_global(self):
        self.mock_virtualenv_no_global.return_value = False
        self.mock_running_under_virtualenv.return_value = True
        self.mock_isfile.return_value = True
        assert egg_link_path(self.mock_dist) == self.site_packages_egglink

    # ############## #
    # # no egglink # #
    # ############## #
    def test_noegglink_in_sitepkgs_notvenv(self):
        self.mock_virtualenv_no_global.return_value = False
        self.mock_running_under_virtualenv.return_value = False
        # isfile returns False everywhere: no egg-link at all.
        self.mock_isfile.return_value = False
        assert egg_link_path(self.mock_dist) is None

    def test_noegglink_in_sitepkgs_venv_noglobal(self):
        self.mock_virtualenv_no_global.return_value = True
        self.mock_running_under_virtualenv.return_value = True
        self.mock_isfile.return_value = False
        assert egg_link_path(self.mock_dist) is None

    def test_noegglink_in_sitepkgs_venv_global(self):
        self.mock_virtualenv_no_global.return_value = False
        self.mock_running_under_virtualenv.return_value = True
        self.mock_isfile.return_value = False
        assert egg_link_path(self.mock_dist) is None
|
2012-07-09 01:16:26 +02:00
|
|
|
|
2014-01-28 15:17:51 +01:00
|
|
|
|
2017-08-31 17:48:18 +02:00
|
|
|
@patch('pip._internal.utils.misc.dist_in_usersite')
@patch('pip._internal.utils.misc.dist_is_local')
@patch('pip._internal.utils.misc.dist_is_editable')
class Tests_get_installed_distributions:
    """test util.get_installed_distributions"""

    # A working set covering every category the predicates distinguish.
    workingset = [
        Mock(test_name="global"),
        Mock(test_name="editable"),
        Mock(test_name="normal"),
        Mock(test_name="user"),
    ]

    # Distributions whose keys match stdlib-backport names that are
    # excluded by default on Python >= 2.7.
    workingset_stdlib = [
        Mock(test_name='normal', key='argparse'),
        Mock(test_name='normal', key='wsgiref')
    ]

    # Distributions that a freeze-style skip list should filter out.
    workingset_freeze = [
        Mock(test_name='normal', key='pip'),
        Mock(test_name='normal', key='setuptools'),
        Mock(test_name='normal', key='distribute')
    ]

    def dist_is_editable(self, dist):
        # Fake predicate: only the dist tagged "editable" is editable.
        return dist.test_name == "editable"

    def dist_is_local(self, dist):
        # Fake predicate: "global" and "user" dists are not local.
        return dist.test_name != "global" and dist.test_name != 'user'

    def dist_in_usersite(self, dist):
        # Fake predicate: only the dist tagged "user" lives in the user site.
        return dist.test_name == "user"

    def _wire_mocks(self, mock_dist_is_editable, mock_dist_is_local,
                    mock_dist_in_usersite):
        """Point the patched predicates at the fake implementations above.

        Shared by every test; the three mocks arrive in the decorator
        order (innermost patch first).
        """
        mock_dist_is_editable.side_effect = self.dist_is_editable
        mock_dist_is_local.side_effect = self.dist_is_local
        mock_dist_in_usersite.side_effect = self.dist_in_usersite

    @patch('pip._vendor.pkg_resources.working_set', workingset)
    def test_editables_only(self, mock_dist_is_editable,
                            mock_dist_is_local,
                            mock_dist_in_usersite):
        self._wire_mocks(mock_dist_is_editable, mock_dist_is_local,
                         mock_dist_in_usersite)
        dists = get_installed_distributions(editables_only=True)
        assert len(dists) == 1, dists
        assert dists[0].test_name == "editable"

    @patch('pip._vendor.pkg_resources.working_set', workingset)
    def test_exclude_editables(self, mock_dist_is_editable,
                               mock_dist_is_local,
                               mock_dist_in_usersite):
        self._wire_mocks(mock_dist_is_editable, mock_dist_is_local,
                         mock_dist_in_usersite)
        dists = get_installed_distributions(include_editables=False)
        assert len(dists) == 1
        assert dists[0].test_name == "normal"

    @patch('pip._vendor.pkg_resources.working_set', workingset)
    def test_include_globals(self, mock_dist_is_editable,
                             mock_dist_is_local,
                             mock_dist_in_usersite):
        self._wire_mocks(mock_dist_is_editable, mock_dist_is_local,
                         mock_dist_in_usersite)
        dists = get_installed_distributions(local_only=False)
        assert len(dists) == 4

    @patch('pip._vendor.pkg_resources.working_set', workingset)
    def test_user_only(self, mock_dist_is_editable,
                       mock_dist_is_local,
                       mock_dist_in_usersite):
        self._wire_mocks(mock_dist_is_editable, mock_dist_is_local,
                         mock_dist_in_usersite)
        dists = get_installed_distributions(local_only=False,
                                            user_only=True)
        assert len(dists) == 1
        assert dists[0].test_name == "user"

    @patch('pip._vendor.pkg_resources.working_set', workingset_stdlib)
    def test_gte_py27_excludes(self, mock_dist_is_editable,
                               mock_dist_is_local,
                               mock_dist_in_usersite):
        self._wire_mocks(mock_dist_is_editable, mock_dist_is_local,
                         mock_dist_in_usersite)
        dists = get_installed_distributions()
        assert len(dists) == 0

    @patch('pip._vendor.pkg_resources.working_set', workingset_freeze)
    def test_freeze_excludes(self, mock_dist_is_editable,
                             mock_dist_is_local,
                             mock_dist_in_usersite):
        self._wire_mocks(mock_dist_is_editable, mock_dist_is_local,
                         mock_dist_in_usersite)
        dists = get_installed_distributions(
            skip=('setuptools', 'pip', 'distribute'))
        assert len(dists) == 0
|
|
|
|
|
2012-12-16 09:24:34 +01:00
|
|
|
|
2013-08-17 09:18:40 +02:00
|
|
|
class TestUnpackArchives(object):
|
|
|
|
"""
|
2014-01-28 15:17:51 +01:00
|
|
|
test_tar.tgz/test_tar.zip have content as follows engineered to confirm 3
|
|
|
|
things:
|
2013-08-17 09:18:40 +02:00
|
|
|
1) confirm that reg files, dirs, and symlinks get unpacked
|
|
|
|
2) permissions are not preserved (and go by the 022 umask)
|
|
|
|
3) reg files with *any* execute perms, get chmod +x
|
|
|
|
|
|
|
|
file.txt 600 regular file
|
|
|
|
symlink.txt 777 symlink to file.txt
|
|
|
|
script_owner.sh 700 script where owner can execute
|
|
|
|
script_group.sh 610 script where group can execute
|
|
|
|
script_world.sh 601 script where world can execute
|
|
|
|
dir 744 directory
|
|
|
|
dir/dirfile 622 regular file
|
2012-12-16 09:24:34 +01:00
|
|
|
|
2013-08-17 09:18:40 +02:00
|
|
|
"""
|
2012-12-16 09:24:34 +01:00
|
|
|
|
2013-08-17 09:18:40 +02:00
|
|
|
def setup(self):
|
|
|
|
self.tempdir = tempfile.mkdtemp()
|
|
|
|
self.old_mask = os.umask(0o022)
|
|
|
|
self.symlink_expected_mode = None
|
2012-12-16 09:24:34 +01:00
|
|
|
|
2013-08-17 09:18:40 +02:00
|
|
|
def teardown(self):
|
|
|
|
os.umask(self.old_mask)
|
|
|
|
shutil.rmtree(self.tempdir, ignore_errors=True)
|
|
|
|
|
|
|
|
def mode(self, path):
|
|
|
|
return stat.S_IMODE(os.stat(path).st_mode)
|
|
|
|
|
|
|
|
def confirm_files(self):
|
2016-06-10 21:27:07 +02:00
|
|
|
# expectations based on 022 umask set above and the unpack logic that
|
2014-01-28 15:17:51 +01:00
|
|
|
# sets execute permissions, not preservation
|
2013-08-17 09:18:40 +02:00
|
|
|
for fname, expected_mode, test in [
|
2014-01-28 15:17:51 +01:00
|
|
|
('file.txt', 0o644, os.path.isfile),
|
|
|
|
('symlink.txt', 0o644, os.path.isfile),
|
|
|
|
('script_owner.sh', 0o755, os.path.isfile),
|
|
|
|
('script_group.sh', 0o755, os.path.isfile),
|
|
|
|
('script_world.sh', 0o755, os.path.isfile),
|
|
|
|
('dir', 0o755, os.path.isdir),
|
2014-02-24 22:52:23 +01:00
|
|
|
(os.path.join('dir', 'dirfile'), 0o644, os.path.isfile)]:
|
2013-08-17 09:18:40 +02:00
|
|
|
path = os.path.join(self.tempdir, fname)
|
|
|
|
if path.endswith('symlink.txt') and sys.platform == 'win32':
|
|
|
|
# no symlinks created on windows
|
|
|
|
continue
|
|
|
|
assert test(path), path
|
|
|
|
if sys.platform == 'win32':
|
|
|
|
# the permissions tests below don't apply in windows
|
|
|
|
# due to os.chmod being a noop
|
|
|
|
continue
|
|
|
|
mode = self.mode(path)
|
2014-01-28 15:17:51 +01:00
|
|
|
assert mode == expected_mode, (
|
|
|
|
"mode: %s, expected mode: %s" % (mode, expected_mode)
|
|
|
|
)
|
2013-08-17 09:18:40 +02:00
|
|
|
|
2013-08-23 13:12:37 +02:00
|
|
|
def test_unpack_tgz(self, data):
|
2013-08-17 09:18:40 +02:00
|
|
|
"""
|
|
|
|
Test unpacking a *.tgz, and setting execute permissions
|
|
|
|
"""
|
2013-08-23 13:12:37 +02:00
|
|
|
test_file = data.packages.join("test_tar.tgz")
|
2013-08-17 09:18:40 +02:00
|
|
|
untar_file(test_file, self.tempdir)
|
|
|
|
self.confirm_files()
|
2015-12-18 18:49:05 +01:00
|
|
|
# Check the timestamp of an extracted file
|
|
|
|
file_txt_path = os.path.join(self.tempdir, 'file.txt')
|
|
|
|
mtime = time.gmtime(os.stat(file_txt_path).st_mtime)
|
|
|
|
assert mtime[0:6] == (2013, 8, 16, 5, 13, 37), mtime
|
2013-08-17 09:18:40 +02:00
|
|
|
|
2013-08-23 13:12:37 +02:00
|
|
|
def test_unpack_zip(self, data):
|
2013-08-17 09:18:40 +02:00
|
|
|
"""
|
|
|
|
Test unpacking a *.zip, and setting execute permissions
|
|
|
|
"""
|
2013-08-23 13:12:37 +02:00
|
|
|
test_file = data.packages.join("test_zip.zip")
|
2013-08-17 09:18:40 +02:00
|
|
|
unzip_file(test_file, self.tempdir)
|
|
|
|
self.confirm_files()
|
2015-02-04 17:46:42 +01:00
|
|
|
|
|
|
|
|
|
|
|
class Failer:
    """Helper whose ``call`` raises OSError until a deadline passes."""

    def __init__(self, duration=1):
        # Absolute wall-clock time after which call() stops raising.
        self.succeed_after = time.time() + duration

    def call(self, *args, **kw):
        """Raise OSError while the deadline is still in the future."""
        remaining = self.succeed_after - time.time()
        if remaining > 0:
            raise OSError("Failed")
|
|
|
|
|
|
|
|
|
|
|
|
def test_rmtree_retries(tmpdir, monkeypatch):
    """
    Test pip._internal.utils.rmtree will retry failures
    """
    # shutil.rmtree fails for ~1s; pip's retrying wrapper must outlast it.
    brief_failer = Failer(duration=1)
    monkeypatch.setattr(shutil, 'rmtree', brief_failer.call)
    rmtree('foo')
|
|
|
|
|
|
|
|
|
|
|
|
def test_rmtree_retries_for_3sec(tmpdir, monkeypatch):
    """
    Test pip._internal.utils.rmtree will retry failures for no more than 3 sec
    """
    # Failing for 5s exceeds the ~3s retry budget, so the error surfaces.
    stubborn_failer = Failer(duration=5)
    monkeypatch.setattr(shutil, 'rmtree', stubborn_failer.call)
    with pytest.raises(OSError):
        rmtree('foo')
|
2015-03-19 00:02:30 +01:00
|
|
|
|
2015-03-19 01:45:53 +01:00
|
|
|
|
2015-03-19 00:02:30 +01:00
|
|
|
class Test_normalize_path(object):
    """Tests for pip._internal.utils.misc.normalize_path."""

    # Technically, symlinks are possible on Windows, but you need a special
    # permission bit to create them, and Python 2 doesn't support it anyway,
    # so it's easiest just to skip this test on Windows altogether.
    @pytest.mark.skipif("sys.platform == 'win32'")
    def test_resolve_symlinks(self, tmpdir):
        """normalize_path() dereferences symlinks only when asked to."""
        # Leftover debug print() calls removed.
        orig_working_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            # Lay out foo/bar/file1, then symlinks to both the dir and file.
            d = os.path.join('foo', 'bar')
            f = os.path.join(d, 'file1')
            os.makedirs(d)
            with open(f, 'w'):  # Create the file
                pass

            os.symlink(d, 'dir_link')
            os.symlink(f, 'file_link')

            # Link through a directory symlink: resolved vs. unresolved.
            assert normalize_path(
                'dir_link/file1', resolve_symlinks=True
            ) == os.path.join(tmpdir, f)
            assert normalize_path(
                'dir_link/file1', resolve_symlinks=False
            ) == os.path.join(tmpdir, 'dir_link', 'file1')

            # Direct file symlink: resolved vs. unresolved.
            assert normalize_path(
                'file_link', resolve_symlinks=True
            ) == os.path.join(tmpdir, f)
            assert normalize_path(
                'file_link', resolve_symlinks=False
            ) == os.path.join(tmpdir, 'file_link')
        finally:
            os.chdir(orig_working_dir)
|
Add checks against requirements-file-dwelling hashes for most kinds of packages. Close #1175.
* Add --require-hashes option. This is handy in deployment scripts to force application authors to hash their requirements. It is also a convenient way to get pip to show computed hashes for a virgin, unhashed requirements file. Eventually, additions to `pip freeze` should fill a superset of this use case.
* In --require-hashes mode, at least one hash is required to match for each requirement.
* Option-based requirements (--sha256=...) turn on --require-hashes mode implicitly.
* Internet-derived URL-based hashes are "necessary but not sufficient": they do not satisfy --require-hashes mode when they match, but they are still used to guard against transmission errors.
* Other URL-based requirements (#md5=...) are treated just like flag-based ones, except they don't turn on --require-hashes.
* Complain informatively, with the most devastating errors first so you don't chase your tail all day only to run up against a brick wall at the end. This also means we don't complain that a hash is missing, only for the user to find, after fixing it, that we have no idea how to even compute a hash for that type of requirement.
* Complain about unpinned requirements when hash-checking mode is on, lest they cause the user surprise later.
* Complain about missing hashes.
* Complain about requirement types we don't know how to hash (like VCS ones and local dirs).
* Have InstallRequirement keep its original Link around (original_link) so we can differentiate between URL hashes from requirements files and ones downloaded from the (untrustworthy) internet.
* Remove test_download_hashes, which is obsolete. Similar coverage is provided in test_utils.TestHashes and the various hash cases in test_req.py.
2015-09-09 19:01:53 +02:00
|
|
|
|
|
|
|
|
|
|
|
class TestHashes(object):
    """Tests for pip._internal.utils.hashes"""

    def test_success(self, tmpdir):
        """Make sure no error is raised when at least one hash matches.

        Test check_against_path because it calls everything else.
        """
        file = tmpdir / 'to_hash'
        file.write('hello')
        # Correct sha256 of b'hello' and md5; sha224 is deliberately bogus.
        hashes = Hashes({
            'md5': ['5d41402abc4b2a76b9719d911017c592'],
            'sha224': ['wrongwrong'],
            'sha256': ['2cf24dba5fb0a30e26e83b2ac5b9e29e'
                       '1b161e5c1fa7425e73043362938b9824'],
        })
        hashes.check_against_path(file)

    def test_failure(self):
        """Hashes should raise HashMismatch when no hashes match."""
        bad_hashes = Hashes({'sha256': ['wrongwrong']})
        with pytest.raises(HashMismatch):
            bad_hashes.check_against_file(BytesIO(b'hello'))

    def test_missing_hashes(self):
        """MissingHashes should raise HashMissing when any check is done."""
        with pytest.raises(HashMissing):
            MissingHashes().check_against_file(BytesIO(b'hello'))

    def test_unknown_hash(self):
        """Hashes should raise InstallationError when it encounters an
        unknown hash."""
        unknown_hashes = Hashes({'badbad': ['dummy']})
        with pytest.raises(InstallationError):
            unknown_hashes.check_against_file(BytesIO(b'hello'))

    def test_non_zero(self):
        """Test that truthiness tests tell whether any known-good hashes
        exist."""
        assert bool(Hashes({'sha256': 'dummy'}))
        assert not Hashes()
        assert not Hashes({})
|
2016-02-12 23:41:21 +01:00
|
|
|
|
|
|
|
|
|
|
|
class TestEncoding(object):
    """Tests for pip._internal.utils.encoding"""

    def test_auto_decode_utf16_le(self):
        # UTF-16-LE BOM followed by "Django==1.4.2".
        data = (
            b'\xff\xfeD\x00j\x00a\x00n\x00g\x00o\x00=\x00'
            b'=\x001\x00.\x004\x00.\x002\x00'
        )
        assert auto_decode(data) == "Django==1.4.2"

    def test_auto_decode_no_bom(self):
        # Plain ASCII bytes without a BOM decode unchanged.
        assert auto_decode(b'foobar') == u'foobar'

    def test_auto_decode_pep263_headers(self):
        # A PEP 263 coding declaration inside the data selects the codec.
        latin1_req = u'# coding=latin1\n# Pas trop de café'
        assert auto_decode(latin1_req.encode('latin1')) == latin1_req

    def test_auto_decode_no_preferred_encoding(self):
        # Simulate a locale that reports no preferred encoding at all.
        default_enc = Mock(return_value='ascii')
        preferred_enc = Mock(return_value=None)
        data = u'data'
        with patch('sys.getdefaultencoding', default_enc):
            with patch('locale.getpreferredencoding', preferred_enc):
                ret = auto_decode(data.encode(sys.getdefaultencoding()))
        assert ret == data
|
|
|
|
|
2016-05-20 03:02:26 +02:00
|
|
|
|
2017-06-01 14:54:29 +02:00
|
|
|
class TestTempDirectory(object):
    """Tests for pip._internal.utils.temp_dir.TempDirectory."""

    # No need to test symlinked directories on Windows
    @pytest.mark.skipif("sys.platform == 'win32'")
    def test_symlinked_path(self):
        """TempDirectory.path should be a resolved, symlink-free path."""
        with TempDirectory() as tmp_dir:
            assert os.path.exists(tmp_dir.path)

            # A sibling directory for comparison, made the ordinary way
            # (tempfile does not resolve symlinks in the parent).
            alt_tmp_dir = tempfile.mkdtemp(prefix="pip-test-")
            try:
                assert (
                    os.path.dirname(tmp_dir.path) ==
                    os.path.dirname(os.path.realpath(alt_tmp_dir))
                )
                # are we on a system where /tmp is a symlink
                if os.path.realpath(alt_tmp_dir) != \
                        os.path.abspath(alt_tmp_dir):
                    assert (
                        os.path.dirname(tmp_dir.path) !=
                        os.path.dirname(alt_tmp_dir)
                    )
                else:
                    assert (
                        os.path.dirname(tmp_dir.path) ==
                        os.path.dirname(alt_tmp_dir)
                    )
            finally:
                # Bug fix: this comparison directory used to be leaked;
                # remove it so repeated test runs don't accumulate dirs.
                os.rmdir(alt_tmp_dir)

            os.rmdir(tmp_dir.path)
            assert not os.path.exists(tmp_dir.path)

    def test_deletes_readonly_files(self):
        """cleanup() must delete read-only files, including ones nested in
        subfolders."""
        def create_file(*args):
            fpath = os.path.join(*args)
            ensure_dir(os.path.dirname(fpath))
            with open(fpath, "w") as f:
                f.write("Holla!")

        def readonly_file(*args):
            fpath = os.path.join(*args)
            os.chmod(fpath, stat.S_IREAD)

        with TempDirectory() as tmp_dir:
            create_file(tmp_dir.path, "normal-file")
            create_file(tmp_dir.path, "readonly-file")
            readonly_file(tmp_dir.path, "readonly-file")

            create_file(tmp_dir.path, "subfolder", "normal-file")
            create_file(tmp_dir.path, "subfolder", "readonly-file")
            readonly_file(tmp_dir.path, "subfolder", "readonly-file")

        # Exiting the context manager cleans up and resets .path to None.
        assert tmp_dir.path is None

    def test_create_and_cleanup_work(self):
        """create() allocates the directory; cleanup() removes it and
        resets .path."""
        tmp_dir = TempDirectory()
        assert tmp_dir.path is None

        tmp_dir.create()
        created_path = tmp_dir.path
        assert tmp_dir.path is not None
        assert os.path.exists(created_path)

        tmp_dir.cleanup()
        assert tmp_dir.path is None
        assert not os.path.exists(created_path)
|
2016-07-10 05:17:04 +02:00
|
|
|
|
|
|
|
|
|
|
|
class TestGlibc(object):
    """Tests for pip._internal.utils.glibc.check_glibc_version."""

    def test_manylinux1_check_glibc_version(self):
        """
        Test that the check_glibc_version function is robust against weird
        glibc version strings.
        """
        variants_of_2_20 = [
            "2.20",
            # used by "linaro glibc", see gh-3588
            "2.20-2014.11",
            # weird possibilities that I just made up
            "2.20+dev",
            "2.20-custom",
            "2.20.1",
        ]
        for version_string in variants_of_2_20:
            # Every variant should still parse as glibc 2.20.
            assert check_glibc_version(version_string, 2, 15)
            assert check_glibc_version(version_string, 2, 20)
            assert not check_glibc_version(version_string, 2, 21)
            assert not check_glibc_version(version_string, 3, 15)
            assert not check_glibc_version(version_string, 1, 15)

        # For strings that we just can't parse at all, we should warn and
        # return false
        for bad_string in ["asdf", "", "foo.bar"]:
            with warnings.catch_warnings(record=True) as caught:
                warnings.filterwarnings("always")
                assert not check_glibc_version(bad_string, 2, 5)
            assert any(
                "Expected glibc version with" in str(w.message)
                for w in caught
            ), "Didn't find the warning we were expecting"
|
2016-04-28 22:41:45 +02:00
|
|
|
|
|
|
|
|
|
|
|
class TestCheckRequiresPython(object):
    """Tests for pip._internal.utils.packaging.check_dist_requires_python."""

    @pytest.mark.parametrize(
        ("metadata", "should_raise"),
        [
            ("Name: test\n", False),
            ("Name: test\nRequires-Python:", False),
            ("Name: test\nRequires-Python: invalid_spec", False),
            ("Name: test\nRequires-Python: <=1", True),
        ],
    )
    def test_check_requires(self, metadata, should_raise):
        # A fake distribution serving the given PKG-INFO metadata.
        fake_dist = Mock(
            has_metadata=lambda _: True,
            get_metadata=lambda _: metadata,
        )
        # Guard clause: the non-raising cases just need a clean call.
        if not should_raise:
            check_dist_requires_python(fake_dist)
            return
        with pytest.raises(UnsupportedPythonVersion):
            check_dist_requires_python(fake_dist)
|
2017-10-12 07:29:53 +02:00
|
|
|
|
|
|
|
|
|
|
|
class TestGetProg(object):
    """Tests for pip._internal.utils.misc.get_prog."""

    @pytest.mark.parametrize(
        ("argv", "executable", "expected"),
        [
            ('/usr/bin/pip', '', 'pip'),
            ('-c', '/usr/bin/python', '/usr/bin/python -m pip'),
            ('__main__.py', '/usr/bin/python', '/usr/bin/python -m pip'),
            ('/usr/bin/pip3', '', 'pip3'),
        ]
    )
    def test_get_prog(self, monkeypatch, argv, executable, expected):
        # Fake out both the script name and the interpreter path as seen
        # from within pip._internal.utils.misc.
        monkeypatch.setattr('pip._internal.utils.misc.sys.argv', [argv])
        monkeypatch.setattr(
            'pip._internal.utils.misc.sys.executable', executable)
        assert get_prog() == expected
|
2018-01-21 12:25:41 +01:00
|
|
|
|
|
|
|
|
|
|
|
def test_call_subprocess_works_okay_when_just_given_nothing():
    """A trivial command should run to completion without raising.

    No try/except wrapper is needed: if call_subprocess raises, pytest
    fails the test and reports the full traceback, which is strictly more
    informative than catching the exception and asserting False.
    """
    call_subprocess([sys.executable, '-c', 'print("Hello")'])
|
|
|
|
|
|
|
|
|
|
|
|
def test_call_subprocess_closes_stdin():
    """A child that reads stdin should fail, since pip closes its stdin."""
    command = [sys.executable, '-c', 'input()']
    with pytest.raises(InstallationError):
        call_subprocess(command)
|
2018-04-27 00:02:54 +02:00
|
|
|
|
|
|
|
|
2018-09-24 23:53:39 +02:00
|
|
|
@pytest.mark.parametrize('args, expected', [
    # Test without subdir.
    (('git+https://example.com/pkg', 'dev', 'myproj'),
     'git+https://example.com/pkg@dev#egg=myproj'),
    # Test with subdir.
    (('git+https://example.com/pkg', 'dev', 'myproj', 'sub/dir'),
     'git+https://example.com/pkg@dev#egg=myproj&subdirectory=sub/dir'),
    # Test with None subdir.
    (('git+https://example.com/pkg', 'dev', 'myproj', None),
     'git+https://example.com/pkg@dev#egg=myproj'),
])
def test_make_vcs_requirement_url(args, expected):
    """Building the pip-style VCS requirement URL should match expected."""
    assert make_vcs_requirement_url(*args) == expected
|
|
|
|
|
|
|
|
|
2018-07-25 18:56:22 +02:00
|
|
|
@pytest.mark.parametrize('netloc, expected', [
    # Test a basic case.
    ('example.com', ('example.com', (None, None))),
    # Test with username and no password.
    ('user@example.com', ('example.com', ('user', None))),
    # Test with username and password.
    ('user:pass@example.com', ('example.com', ('user', 'pass'))),
    # Test with username and empty password.
    ('user:@example.com', ('example.com', ('user', ''))),
    # Test the password containing an @ symbol.
    ('user:pass@word@example.com', ('example.com', ('user', 'pass@word'))),
    # Test the password containing a : symbol.
    ('user:pass:word@example.com', ('example.com', ('user', 'pass:word'))),
])
def test_split_auth_from_netloc(netloc, expected):
    """Splitting the auth part off a netloc should match expected."""
    assert split_auth_from_netloc(netloc) == expected
|
|
|
|
|
|
|
|
|
2018-05-17 16:21:34 +02:00
|
|
|
@pytest.mark.parametrize('auth_url, expected_url', [
    ('https://user:pass@domain.tld/project/tags/v0.2',
     'https://domain.tld/project/tags/v0.2'),
    ('https://domain.tld/project/tags/v0.2',
     'https://domain.tld/project/tags/v0.2',),
    ('https://user:pass@domain.tld/svn/project/trunk@8181',
     'https://domain.tld/svn/project/trunk@8181'),
    ('https://domain.tld/project/trunk@8181',
     'https://domain.tld/project/trunk@8181',),
    ('git+https://pypi.org/something',
     'git+https://pypi.org/something'),
    ('git+https://user:pass@pypi.org/something',
     'git+https://pypi.org/something'),
    ('git+ssh://git@pypi.org/something',
     'git+ssh://pypi.org/something'),
])
def test_remove_auth_from_url(auth_url, expected_url):
    """Stripping credentials from a URL should match expected_url."""
    stripped = remove_auth_from_url(auth_url)
    assert stripped == expected_url
|