Deprecate and Noop the PEP 438 functionality due to PEP 470

This commit is contained in:
Donald Stufft 2015-09-02 23:08:11 -04:00
parent 9a917aca76
commit 0f5d4286a5
16 changed files with 132 additions and 503 deletions

View File

@ -1,3 +1,11 @@
**8.0.0 (unreleased)**
* Deprecate and no-op the ``--allow-external``, ``--allow-all-external``, and
``--allow-unverified`` functionality that was added as part of PEP 438. With
changes made to the repository protocol in PEP 470, these options are no
longer functional.
**7.1.2 (2015-08-22)**
* Don't raise an error if pip is not installed when checking for the latest pip

View File

@ -105,9 +105,6 @@ Additionally, the following Package Index Options are supported:
* :ref:`--extra-index-url <--extra-index-url>`
* :ref:`--no-index <--no-index>`
* :ref:`-f, --find-links <--find-links>`
* :ref:`--allow-external <--allow-external>`
* :ref:`--allow-all-external <--allow-all-external>`
* :ref:`--allow-unverified <--allow-unverified>`
* :ref:`--no-binary <install_--no-binary>`
* :ref:`--only-binary <install_--only-binary>`
@ -224,37 +221,6 @@ that will enable installing pre-releases and development releases.
.. _PEP426: http://www.python.org/dev/peps/pep-0426
.. _`Externally Hosted Files`:
Externally Hosted Files
+++++++++++++++++++++++
Starting with v1.4, pip will warn about installing any file that does not come
from the primary index. As of version 1.5, pip defaults to ignoring these files
unless asked to consider them.
The ``pip install`` command supports a
:ref:`--allow-external PROJECT <--allow-external>` option that will enable
installing links that are linked directly from the simple index but to an
external host that also has a supported hash fragment. Externally hosted
files for all projects may be enabled using the
:ref:`--allow-all-external <--allow-all-external>` flag to the ``pip install``
command.
The ``pip install`` command also supports a
:ref:`--allow-unverified PROJECT <--allow-unverified>` option that will enable
installing insecurely linked files. These are either directly linked (as above)
files without a hash, or files that are linked from either the home page or the
download url of a package.
These options can be used in a requirements file. Assuming some fictional
`ExternalPackage` that is hosted externally and unverified, then your requirements
file would be like so::
--allow-external ExternalPackage
--allow-unverified ExternalPackage
ExternalPackage
.. _`VCS Support`:

View File

@ -30,7 +30,7 @@ import pip.cmdoptions
cmdoptions = pip.cmdoptions
# The version as used in the setup.py and the docs conf.py
__version__ = "7.2.0.dev0"
__version__ = "8.0.0.dev0"
logger = logging.getLogger(__name__)

View File

@ -266,8 +266,7 @@ def allow_external():
action="append",
default=[],
metavar="PACKAGE",
help="Allow the installation of a package even if it is externally "
"hosted",
help=SUPPRESS_HELP,
)
@ -277,7 +276,7 @@ allow_all_external = partial(
dest="allow_all_external",
action="store_true",
default=False,
help="Allow the installation of all packages that are externally hosted",
help=SUPPRESS_HELP,
)
@ -312,8 +311,7 @@ def allow_unsafe():
action="append",
default=[],
metavar="PACKAGE",
help="Allow the installation of a package even if it is hosted "
"in an insecure and unverifiable way",
help=SUPPRESS_HELP,
)
# Remove after 7.0

View File

@ -21,7 +21,7 @@ from pip.exceptions import (
from pip import cmdoptions
from pip.utils import ensure_dir
from pip.utils.build import BuildDirectory
from pip.utils.deprecation import RemovedInPip8Warning
from pip.utils.deprecation import RemovedInPip8Warning, RemovedInPip10Warning
from pip.utils.filesystem import check_path_owner
from pip.wheel import WheelCache, WheelBuilder
@ -185,9 +185,6 @@ class InstallCommand(RequirementCommand):
find_links=options.find_links,
format_control=options.format_control,
index_urls=index_urls,
allow_external=options.allow_external,
allow_unverified=options.allow_unverified,
allow_all_external=options.allow_all_external,
trusted_hosts=options.trusted_hosts,
allow_all_prereleases=options.pre,
process_dependency_links=options.process_dependency_links,
@ -198,6 +195,30 @@ class InstallCommand(RequirementCommand):
cmdoptions.resolve_wheel_no_use_binary(options)
cmdoptions.check_install_build_global(options)
if options.allow_external:
warnings.warn(
"--allow-external has been deprecated and will be removed in "
"the future. Due to changes in the repository protocol, it no "
"longer has any effect.",
RemovedInPip10Warning,
)
if options.allow_all_external:
warnings.warn(
"--allow-all-external has been deprecated and will be removed "
"in the future. Due to changes in the repository protocol, it "
"no longer has any effect.",
RemovedInPip10Warning,
)
if options.allow_unverified:
warnings.warn(
"--allow-unverified has been deprecated and will be removed "
"in the future. Due to changes in the repository protocol, it "
"no longer has any effect.",
RemovedInPip10Warning,
)
if options.download_dir:
options.ignore_installed = True

View File

@ -1,6 +1,7 @@
from __future__ import absolute_import
import logging
import warnings
from pip._vendor import pkg_resources
@ -9,6 +10,7 @@ from pip.exceptions import DistributionNotFound
from pip.index import FormatControl, fmt_ctl_formats, PackageFinder, Search
from pip.req import InstallRequirement
from pip.utils import get_installed_distributions, dist_is_editable
from pip.utils.deprecation import RemovedInPip10Warning
from pip.wheel import WheelCache
from pip.cmdoptions import make_option_group, index_group
@ -81,9 +83,6 @@ class ListCommand(Command):
return PackageFinder(
find_links=options.find_links,
index_urls=index_urls,
allow_external=options.allow_external,
allow_unverified=options.allow_unverified,
allow_all_external=options.allow_all_external,
allow_all_prereleases=options.pre,
trusted_hosts=options.trusted_hosts,
process_dependency_links=options.process_dependency_links,
@ -91,6 +90,30 @@ class ListCommand(Command):
)
def run(self, options, args):
if options.allow_external:
warnings.warn(
"--allow-external has been deprecated and will be removed in "
"the future. Due to changes in the repository protocol, it no "
"longer has any effect.",
RemovedInPip10Warning,
)
if options.allow_all_external:
warnings.warn(
"--allow-all-external has been deprecated and will be removed "
"in the future. Due to changes in the repository protocol, it "
"no longer has any effect.",
RemovedInPip10Warning,
)
if options.allow_unverified:
warnings.warn(
"--allow-unverified has been deprecated and will be removed "
"in the future. Due to changes in the repository protocol, it "
"no longer has any effect.",
RemovedInPip10Warning,
)
if options.outdated:
self.run_outdated(options)
elif options.uptodate:

View File

@ -11,7 +11,7 @@ from pip.exceptions import CommandError, PreviousBuildDirError
from pip.req import RequirementSet
from pip.utils import import_or_raise, normalize_path
from pip.utils.build import BuildDirectory
from pip.utils.deprecation import RemovedInPip8Warning
from pip.utils.deprecation import RemovedInPip8Warning, RemovedInPip10Warning
from pip.wheel import WheelCache, WheelBuilder
from pip import cmdoptions
@ -128,6 +128,30 @@ class WheelCommand(RequirementCommand):
cmdoptions.resolve_wheel_no_use_binary(options)
cmdoptions.check_install_build_global(options)
if options.allow_external:
warnings.warn(
"--allow-external has been deprecated and will be removed in "
"the future. Due to changes in the repository protocol, it no "
"longer has any effect.",
RemovedInPip10Warning,
)
if options.allow_all_external:
warnings.warn(
"--allow-all-external has been deprecated and will be removed "
"in the future. Due to changes in the repository protocol, it "
"no longer has any effect.",
RemovedInPip10Warning,
)
if options.allow_unverified:
warnings.warn(
"--allow-unverified has been deprecated and will be removed "
"in the future. Due to changes in the repository protocol, it "
"no longer has any effect.",
RemovedInPip10Warning,
)
index_urls = [options.index_url] + options.extra_index_urls
if options.no_index:
logger.info('Ignoring indexes: %s', ','.join(index_urls))
@ -150,9 +174,6 @@ class WheelCommand(RequirementCommand):
find_links=options.find_links,
format_control=options.format_control,
index_urls=index_urls,
allow_external=options.allow_external,
allow_unverified=options.allow_unverified,
allow_all_external=options.allow_all_external,
allow_all_prereleases=options.pre,
trusted_hosts=options.trusted_hosts,
process_dependency_links=options.process_dependency_links,

View File

@ -99,9 +99,7 @@ class PackageFinder(object):
packages, by reading pages and looking for appropriate links.
"""
def __init__(self, find_links, index_urls,
allow_external=(), allow_unverified=(),
allow_all_external=False, allow_all_prereleases=False,
def __init__(self, find_links, index_urls, allow_all_prereleases=False,
trusted_hosts=None, process_dependency_links=False,
session=None, format_control=None):
"""Create a PackageFinder.
@ -137,34 +135,12 @@ class PackageFinder(object):
self.format_control = format_control or FormatControl(set(), set())
# Do we allow (safe and verifiable) externally hosted files?
self.allow_external = set(normalize_name(n) for n in allow_external)
# Which names are allowed to install insecure and unverifiable files?
self.allow_unverified = set(
normalize_name(n) for n in allow_unverified
)
# Anything that is allowed unverified is also allowed external
self.allow_external |= self.allow_unverified
# Do we allow all (safe and verifiable) externally hosted files?
self.allow_all_external = allow_all_external
# Domains that we won't emit warnings for when not using HTTPS
self.secure_origins = [
("*", host, "*")
for host in (trusted_hosts if trusted_hosts else [])
]
# Stores if we ignored any external links so that we can instruct
# end users how to install them if no distributions are available
self.need_warn_external = False
# Stores if we ignored any unsafe links so that we can instruct
# end users how to install them if no distributions are available
self.need_warn_unverified = False
# Do we want to allow _all_ pre-releases?
self.allow_all_prereleases = allow_all_prereleases
@ -370,10 +346,7 @@ class PackageFinder(object):
# Check that we have the url_name correctly spelled:
# Only check main index if index URL is given
main_index_url = Link(
mkurl_pypi_url(self.index_urls[0]),
trusted=True,
)
main_index_url = Link(mkurl_pypi_url(self.index_urls[0]))
page = self._get_page(main_index_url)
if page is None and PyPI.netloc not in str(main_index_url):
@ -385,7 +358,7 @@ class PackageFinder(object):
)
project_url_name = self._find_url_name(
Link(self.index_urls[0], trusted=True),
Link(self.index_urls[0]),
project_url_name,
) or project_url_name
@ -418,8 +391,8 @@ class PackageFinder(object):
# We want to filter out any thing which does not have a secure origin.
url_locations = [
link for link in itertools.chain(
(Link(url, trusted=True) for url in index_url_loc),
(Link(url, trusted=True) for url in fl_url_loc),
(Link(url) for url in index_url_loc),
(Link(url) for url in fl_url_loc),
(Link(url) for url in dep_url_loc),
)
if self._validate_secure_origin(logger, link)
@ -436,7 +409,7 @@ class PackageFinder(object):
search = Search(project_name.lower(), canonical_name, formats)
find_links_versions = self._package_versions(
# We trust every directly linked archive in find_links
(Link(url, '-f', trusted=True) for url in self.find_links),
(Link(url, '-f') for url in self.find_links),
search
)
@ -552,21 +525,6 @@ class PackageFinder(object):
)
)
if self.need_warn_external:
logger.warning(
"Some externally hosted files were ignored as access to "
"them may be unreliable (use --allow-external %s to "
"allow).",
req.name,
)
if self.need_warn_unverified:
logger.warning(
"Some insecure and unverifiable files were ignored"
" (use --allow-unverified %s to allow).",
req.name,
)
raise DistributionNotFound(
'No matching distribution found for %s' % req
)
@ -591,12 +549,6 @@ class PackageFinder(object):
selected_version = applicable_versions[0].location
if (selected_version.verifiable is not None and not
selected_version.verifiable):
logger.warning(
"%s is potentially insecure and unverifiable.", req.name,
)
return selected_version
def _find_url_name(self, index_url, url_name):
@ -630,7 +582,6 @@ class PackageFinder(object):
"""
all_locations = list(locations)
seen = set()
normalized = normalize_name(project_name)
while all_locations:
location = all_locations.pop(0)
@ -645,29 +596,6 @@ class PackageFinder(object):
yield page
for link in page.rel_links():
if (normalized not in self.allow_external and not
self.allow_all_external):
self.need_warn_external = True
logger.debug(
"Not searching %s for files because external "
"urls are disallowed.",
link,
)
continue
if (link.trusted is not None and not
link.trusted and
normalized not in self.allow_unverified):
logger.debug(
"Not searching %s for urls, it is an "
"untrusted link and cannot produce safe or "
"verifiable files.",
link,
)
self.need_warn_unverified = True
continue
all_locations.append(link)
_py_version_re = re.compile(r'-py([123]\.?[0-9]?)$')
@ -779,29 +707,6 @@ class PackageFinder(object):
link, 'wrong project name (not %s)' % search.supplied)
return
if (link.internal is not None and not
link.internal and not
normalize_name(search.supplied).lower()
in self.allow_external and not
self.allow_all_external):
# We have a link that we are sure is external, so we should skip
# it unless we are allowing externals
self._log_skipped_link(link, 'it is externally hosted')
self.need_warn_external = True
return
if (link.verifiable is not None and not
link.verifiable and not
(normalize_name(search.supplied).lower()
in self.allow_unverified)):
# We have a link that we are sure we cannot verify its integrity,
# so we should skip it unless we are allowing unsafe installs
# for this requirement.
self._log_skipped_link(
link, 'it is an insecure and unverifiable file')
self.need_warn_unverified = True
return
match = self._py_version_re.search(version)
if match:
version = version[:match.start()]
@ -850,7 +755,7 @@ def egg_info_matches(
class HTMLPage(object):
"""Represents one page, along with its URL"""
def __init__(self, content, url, headers=None, trusted=None):
def __init__(self, content, url, headers=None):
# Determine if we have any encoding information in our headers
encoding = None
if headers and "Content-Type" in headers:
@ -867,7 +772,6 @@ class HTMLPage(object):
)
self.url = url
self.headers = headers
self.trusted = trusted
def __str__(self):
return self.url
@ -944,10 +848,7 @@ class HTMLPage(object):
)
return
inst = cls(
resp.content, resp.url, resp.headers,
trusted=link.trusted,
)
inst = cls(resp.content, resp.url, resp.headers)
except requests.HTTPError as exc:
level = 2 if exc.response.status_code == 404 else 1
cls._handle_fail(link, exc, url, level=level)
@ -983,20 +884,6 @@ class HTMLPage(object):
return resp.headers.get("Content-Type", "")
@cached_property
def api_version(self):
metas = [
x for x in self.parsed.findall(".//meta")
if x.get("name", "").lower() == "api-version"
]
if metas:
try:
return int(metas[0].get("value", None))
except (TypeError, ValueError):
pass
return None
@cached_property
def base_url(self):
bases = [
@ -1017,20 +904,7 @@ class HTMLPage(object):
url = self.clean_link(
urllib_parse.urljoin(self.base_url, href)
)
# Determine if this link is internal. If that distinction
# doesn't make sense in this context, then we don't make
# any distinction.
internal = None
if self.api_version and self.api_version >= 2:
# Only api_versions >= 2 have a distinction between
# external and internal links
internal = bool(
anchor.get("rel") and
"internal" in anchor.get("rel").split()
)
yield Link(url, self, internal=internal)
yield Link(url, self)
def rel_links(self, rels=('homepage', 'download')):
"""Yields all links with the given relations"""
@ -1046,7 +920,7 @@ class HTMLPage(object):
url = self.clean_link(
urllib_parse.urljoin(self.base_url, href)
)
yield Link(url, self, trusted=False)
yield Link(url, self)
_clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)
@ -1060,7 +934,7 @@ class HTMLPage(object):
class Link(object):
def __init__(self, url, comes_from=None, internal=None, trusted=None):
def __init__(self, url, comes_from=None):
# url can be a UNC windows share
if url != Inf and url.startswith('\\\\'):
@ -1068,8 +942,6 @@ class Link(object):
self.url = url
self.comes_from = comes_from
self.internal = internal
self.trusted = trusted
def __str__(self):
if self.comes_from:
@ -1176,41 +1048,6 @@ class Link(object):
def show_url(self):
return posixpath.basename(self.url.split('#', 1)[0].split('?', 1)[0])
@property
def verifiable(self):
"""
Returns True if this link can be verified after download, False if it
cannot, and None if we cannot determine.
"""
trusted = self.trusted or getattr(self.comes_from, "trusted", None)
if trusted is not None and trusted:
# This link came from a trusted source. It *may* be verifiable but
# first we need to see if this page is operating under the new
# API version.
try:
api_version = getattr(self.comes_from, "api_version", None)
api_version = int(api_version)
except (ValueError, TypeError):
api_version = None
if api_version is None or api_version <= 1:
# This link is either trusted, or it came from a trusted,
# however it is not operating under the API version 2 so
# we can't make any claims about if it's safe or not
return
if self.hash:
# This link came from a trusted source and it has a hash, so we
# can consider it safe.
return True
else:
# This link came from a trusted source, using the new API
# version, and it does not have a hash. It is NOT verifiable
return False
elif trusted is not None:
# This link came from an untrusted source and we cannot trust it
return False
@property
def is_wheel(self):
return self.ext == wheel_ext

View File

@ -8,6 +8,7 @@ import os
import re
import shlex
import optparse
import warnings
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.six.moves import filterfalse
@ -16,7 +17,7 @@ import pip
from pip.download import get_file_content
from pip.req.req_install import InstallRequirement
from pip.exceptions import (RequirementsFileParseError)
from pip.utils import normalize_name
from pip.utils.deprecation import RemovedInPip10Warning
from pip import cmdoptions
__all__ = ['parse_requirements']
@ -174,6 +175,30 @@ def process_line(line, filename, line_number, finder=None, comes_from=None,
# set finder options
elif finder:
if opts.allow_external:
warnings.warn(
"--allow-external has been deprecated and will be removed in "
"the future. Due to changes in the repository protocol, it no "
"longer has any effect.",
RemovedInPip10Warning,
)
if opts.allow_all_external:
warnings.warn(
"--allow-all-external has been deprecated and will be removed "
"in the future. Due to changes in the repository protocol, it "
"no longer has any effect.",
RemovedInPip10Warning,
)
if opts.allow_unverified:
warnings.warn(
"--allow-unverified has been deprecated and will be removed "
"in the future. Due to changes in the repository protocol, it "
"no longer has any effect.",
RemovedInPip10Warning,
)
if opts.index_url:
finder.index_urls = [opts.index_url]
if opts.use_wheel is False:
@ -181,17 +206,8 @@ def process_line(line, filename, line_number, finder=None, comes_from=None,
pip.index.fmt_ctl_no_use_wheel(finder.format_control)
if opts.no_index is True:
finder.index_urls = []
if opts.allow_all_external:
finder.allow_all_external = opts.allow_all_external
if opts.extra_index_urls:
finder.index_urls.extend(opts.extra_index_urls)
if opts.allow_external:
finder.allow_external |= set(
[normalize_name(v).lower() for v in opts.allow_external])
if opts.allow_unverified:
# Remove after 7.0
finder.allow_unverified |= set(
[normalize_name(v).lower() for v in opts.allow_unverified])
if opts.find_links:
# FIXME: it would be nice to keep track of the source
# of the find_links: support a find-links local path

View File

@ -19,6 +19,10 @@ class RemovedInPip9Warning(PipDeprecationWarning, PendingDeprecationWarning):
pass
class RemovedInPip10Warning(PipDeprecationWarning, PendingDeprecationWarning):
pass
DEPRECATIONS = [RemovedInPip8Warning, RemovedInPip9Warning]

View File

@ -140,7 +140,7 @@ def cached_wheel(cache_dir, link, format_control, package_name):
return link
candidates.sort()
path = os.path.join(root, candidates[0][1])
return pip.index.Link(path_to_url(path), trusted=True)
return pip.index.Link(path_to_url(path))
def rehash(path, algo='sha256', blocksize=1 << 20):
@ -783,7 +783,7 @@ class WheelBuilder(object):
self.requirement_set.build_dir)
# Update the link for this.
req.link = pip.index.Link(
path_to_url(wheel_file), trusted=True)
path_to_url(wheel_file))
assert req.link.is_wheel
# extract the wheel into the dir
unpack_url(

View File

@ -1,11 +0,0 @@
<html>
<head>
<meta name="api-version" value="2" />
</head>
<body>
<a rel="internal" href="bar-1.0.tar.gz#md5=7b8a4e19dfd3be354046b97f62cefc09">bar-1.0.tar.gz</a>
<a href="../foo/bar-2.0.tar.gz#md5=d41d8cd98f00b204e9800998ecf8427e">bar-2.0.tar.gz</a>
<a href="../foo/bar-3.0.tar.gz">bar-3.0.tar.gz</a>
<a rel="download" href="../bar2/">
</body>
</html>

View File

@ -1,5 +0,0 @@
<html>
<body>
<a href="bar-4.0.tar.gz">bar-4.0.tar.gz</a>
</body>
</html>

View File

@ -133,12 +133,7 @@ def test_install_dev_version_from_pypi(script):
"""
Test using package==dev.
"""
result = script.pip(
'install', 'INITools===dev',
'--allow-external', 'INITools',
'--allow-unverified', 'INITools',
expect_error=True,
)
result = script.pip('install', 'INITools===dev', expect_error=True)
assert (script.site_packages / 'initools') in result.files_created, (
str(result.stdout)
)

View File

@ -460,226 +460,6 @@ def test_finder_installs_pre_releases_with_version_spec():
assert link.url == "https://foo/bar-2.0b1.tar.gz"
def test_finder_ignores_external_links(data):
"""
Tests that PackageFinder ignores external links, with or without hashes.
"""
req = InstallRequirement.from_line("bar", None)
# using a local index
finder = PackageFinder(
[],
[data.index_url("externals")],
session=PipSession(),
)
link = finder.find_requirement(req, False)
assert link.filename == "bar-1.0.tar.gz"
def test_finder_finds_external_links_with_hashes_per_project(data):
"""
Tests that PackageFinder finds external links but only if they have a hash
using the per project configuration.
"""
req = InstallRequirement.from_line("bar", None)
# using a local index
finder = PackageFinder(
[],
[data.index_url("externals")],
allow_external=["bar"],
session=PipSession(),
)
link = finder.find_requirement(req, False)
assert link.filename == "bar-2.0.tar.gz"
def test_finder_finds_external_links_with_hashes_all(data):
"""
Tests that PackageFinder finds external links but only if they have a hash
using the all externals flag.
"""
req = InstallRequirement.from_line("bar", None)
# using a local index
finder = PackageFinder(
[],
[data.index_url("externals")],
allow_all_external=True,
session=PipSession(),
)
link = finder.find_requirement(req, False)
assert link.filename == "bar-2.0.tar.gz"
def test_finder_finds_external_links_without_hashes_per_project(data):
"""
Tests that PackageFinder finds external links if they do not have a hash
"""
req = InstallRequirement.from_line("bar==3.0", None)
# using a local index
finder = PackageFinder(
[],
[data.index_url("externals")],
allow_external=["bar"],
allow_unverified=["bar"],
session=PipSession(),
)
link = finder.find_requirement(req, False)
assert link.filename == "bar-3.0.tar.gz"
def test_finder_finds_external_links_without_hashes_all(data):
"""
Tests that PackageFinder finds external links if they do not have a hash
using the all external flag
"""
req = InstallRequirement.from_line("bar==3.0", None)
# using a local index
finder = PackageFinder(
[],
[data.index_url("externals")],
allow_all_external=True,
allow_unverified=["bar"],
session=PipSession(),
)
link = finder.find_requirement(req, False)
assert link.filename == "bar-3.0.tar.gz"
def test_finder_finds_external_links_without_hashes_scraped_per_project(data):
"""
Tests that PackageFinder finds externally scraped links
"""
req = InstallRequirement.from_line("bar", None)
# using a local index
finder = PackageFinder(
[],
[data.index_url("externals")],
allow_external=["bar"],
allow_unverified=["bar"],
session=PipSession(),
)
link = finder.find_requirement(req, False)
assert link.filename == "bar-4.0.tar.gz"
def test_finder_finds_external_links_without_hashes_scraped_all(data):
"""
Tests that PackageFinder finds externally scraped links using the all
external flag.
"""
req = InstallRequirement.from_line("bar", None)
# using a local index
finder = PackageFinder(
[],
[data.index_url("externals")],
allow_all_external=True,
allow_unverified=["bar"],
session=PipSession(),
)
link = finder.find_requirement(req, False)
assert link.filename == "bar-4.0.tar.gz"
def test_finder_finds_external_links_without_hashes_per_project_all_insecure(
data):
"""
Tests that PackageFinder finds external links if they do not have a hash
"""
req = InstallRequirement.from_line("bar==3.0", None)
# using a local index
finder = PackageFinder(
[],
[data.index_url("externals")],
allow_external=["bar"],
allow_unverified=["bar"],
session=PipSession(),
)
link = finder.find_requirement(req, False)
assert link.filename == "bar-3.0.tar.gz"
def test_finder_finds_external_links_without_hashes_all_all_insecure(data):
"""
Tests that PackageFinder finds external links if they do not have a hash
using the all external flag
"""
req = InstallRequirement.from_line("bar==3.0", None)
# using a local index
finder = PackageFinder(
[],
[data.index_url("externals")],
allow_all_external=True,
allow_unverified=["bar"],
session=PipSession(),
)
link = finder.find_requirement(req, False)
assert link.filename == "bar-3.0.tar.gz"
def test_finder_finds_external_links_without_hashes_scraped_per_project_all_insecure(data): # noqa
"""
Tests that PackageFinder finds externally scraped links
"""
req = InstallRequirement.from_line("bar", None)
# using a local index
finder = PackageFinder(
[],
[data.index_url("externals")],
allow_external=["bar"],
allow_unverified=["bar"],
session=PipSession(),
)
link = finder.find_requirement(req, False)
assert link.filename == "bar-4.0.tar.gz"
def test_finder_finds_external_links_without_hashes_scraped_all_all_insecure(
data):
"""
Tests that PackageFinder finds externally scraped links using the all
external flag.
"""
req = InstallRequirement.from_line("bar", None)
# using a local index
finder = PackageFinder(
[],
[data.index_url("externals")],
allow_all_external=True,
allow_unverified=["bar"],
session=PipSession(),
)
link = finder.find_requirement(req, False)
assert link.filename == "bar-4.0.tar.gz"
def test_finder_finds_external_links_without_hashes_scraped_insecure(data):
"""
Tests that PackageFinder finds externally scraped links without the
external flag
"""
req = InstallRequirement.from_line("bar", None)
# using a local index
finder = PackageFinder(
[],
[data.index_url("externals")],
allow_unverified=["bar"],
session=PipSession(),
)
link = finder.find_requirement(req, False)
assert link.filename == "bar-4.0.tar.gz"
class test_link_package_versions(object):
# patch this for travis which has distribute in its base env for now

View File

@ -192,16 +192,6 @@ class TestProcessLine(object):
list(process_line("--extra-index-url=url", "file", 1, finder=finder))
assert finder.index_urls == ['url']
def test_set_finder_allow_external(self, finder):
list(process_line("--allow-external=SomeProject",
"file", 1, finder=finder))
assert finder.allow_external == set(['someproject'])
def test_set_finder_allow_unsafe(self, finder):
list(process_line("--allow-unverified=SomeProject",
"file", 1, finder=finder))
assert finder.allow_unverified == set(['someproject'])
def test_set_finder_use_wheel(self, finder):
list(process_line("--use-wheel", "file", 1, finder=finder))
no_use_wheel_fmt = pip.index.FormatControl(set(), set())
@ -444,10 +434,6 @@ class TestParseRequirements(object):
--extra-index-url https://two.example.com/
--no-use-wheel
--no-index
--allow-external foo
--allow-all-external
--allow-insecure foo
--allow-unverified foo
""")
parse_requirements(tmpdir.join("req.txt"), session=PipSession())
@ -484,13 +470,3 @@ class TestParseRequirements(object):
call.index(global_option) > 0
assert options.format_control.no_binary == set([':all:'])
assert options.format_control.only_binary == set([])
def test_allow_all_external(self, tmpdir):
req_path = tmpdir.join("requirements.txt")
with open(req_path, "w") as fh:
fh.write("""
--allow-all-external
foo
""")
list(parse_requirements(req_path, session=PipSession()))