Merge pull request #9739 from pradyunsg/tests/drop-YAML-based-resolver-tests

Drop YAML tests for the resolver
Pradyun Gedam 2021-03-28 07:56:09 +01:00 committed by GitHub
commit 6ff1f7da2f
26 changed files with 0 additions and 2552 deletions


@@ -34,7 +34,6 @@ repos:
^tools/|
# Tests
^tests/conftest.py|
^tests/yaml|
^tests/lib|
^tests/data|
^tests/unit|
@@ -45,7 +44,6 @@ repos:
# A blank ignore, to avoid merge conflicts later.
^$
- repo: https://gitlab.com/pycqa/flake8
rev: 3.8.4
hooks:


@@ -51,7 +51,6 @@ The ``README``, license, ``pyproject.toml``, ``setup.py``, and so on are in the
* ``functional/`` *[functional tests of pip's CLI -- end-to-end, invoke pip in subprocess & check results of execution against desired result. This also is what makes test suite slow]*
* ``lib/`` *[helpers for tests]*
* ``unit/`` *[unit tests -- fast and small and nice!]*
* ``yaml/`` *[resolver tests! They're written in YAML. This folder just contains .yaml files -- actual code for reading/running them is in lib/yaml.py. This is fine!]*
* ``tools`` *[misc development workflow tools, like requirements files & Travis CI files & helpers for tox]*
* ``.azure-pipelines``


@@ -66,7 +66,6 @@ markers =
svn: VCS: Subversion
mercurial: VCS: Mercurial
git: VCS: git
yaml: yaml based tests
search: tests for 'pip search'
[coverage:run]


@@ -1,203 +0,0 @@
"""
Tests for the resolver
"""
import os
import re
import sys
import pytest
import yaml
from tests.lib import DATA_DIR, create_basic_wheel_for_package, path_to_url
def generate_yaml_tests(directory):
"""
Generate yaml test cases from the yaml files in the given directory
"""
for yml_file in directory.glob("*.yml"):
data = yaml.safe_load(yml_file.read_text())
assert "cases" in data, "A fixture needs cases to be used in testing"
# Strip the directory prefix and the ".yml" extension to get the base
# name of the test case
base_name = str(yml_file)[len(str(directory)) + 1:-4]
base = data.get("base", {})
cases = data["cases"]
for resolver in 'legacy', '2020-resolver':
for i, case_template in enumerate(cases):
case = base.copy()
case.update(case_template)
case[":name:"] = base_name
if len(cases) > 1:
case[":name:"] += "-" + str(i)
case[":name:"] += "*" + resolver
case[":resolver:"] = resolver
skip = case.pop("skip", False)
assert skip in [False, True, 'legacy', '2020-resolver']
if skip is True or skip == resolver:
case = pytest.param(case, marks=pytest.mark.xfail)
yield case
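# The ids generated above look like "simple-0*legacy" or
# "simple-0*2020-resolver": the fixture's base name, the case index (when a
# file contains more than one case) and the resolver variant. id_func below
# surfaces them as the pytest parameter ids, which is what selections such as
# "-k simple-0" (see the README in tests/yaml) match against.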
def id_func(param):
"""
Give a nice parameter name to the generated function parameters
"""
if isinstance(param, dict) and ":name:" in param:
return param[":name:"]
retval = str(param)
if len(retval) > 25:
retval = retval[:20] + "..." + retval[-2:]
return retval
def convert_to_dict(string):
def stripping_split(my_str, splitwith, count=None):
if count is None:
return [x.strip() for x in my_str.strip().split(splitwith)]
else:
return [x.strip() for x in my_str.strip().split(splitwith, count)]
parts = stripping_split(string, ";")
retval = {}
retval["depends"] = []
retval["extras"] = {}
retval["name"], retval["version"] = stripping_split(parts[0], " ")
for part in parts[1:]:
verb, args_str = stripping_split(part, " ", 1)
assert verb in ["depends"], f"Unknown verb {verb!r}"
retval[verb] = stripping_split(args_str, ",")
return retval
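# For reference, convert_to_dict turns the compact package-spec strings used
# in the fixtures into the keyword arguments passed on to
# create_basic_wheel_for_package below, e.g.
#   convert_to_dict("A 1.0.0; depends B == 1.0.0, C >= 1.0.0")
# returns
#   {"name": "A", "version": "1.0.0", "extras": {},
#    "depends": ["B == 1.0.0", "C >= 1.0.0"]}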
def handle_request(script, action, requirement, options, resolver_variant):
if action == 'install':
args = ['install']
if resolver_variant == "legacy":
args.append("--use-deprecated=legacy-resolver")
args.extend(["--no-index", "--find-links",
path_to_url(script.scratch_path)])
elif action == 'uninstall':
args = ['uninstall', '--yes']
else:
raise f"Did not excpet action: {action!r}"
if isinstance(requirement, str):
args.append(requirement)
elif isinstance(requirement, list):
args.extend(requirement)
else:
raise f"requirement neither str nor list {requirement!r}"
args.extend(options)
args.append("--verbose")
result = script.pip(*args,
allow_stderr_error=True,
allow_stderr_warning=True,
allow_error=True)
# Check which packages got installed
state = []
for path in os.listdir(script.site_packages_path):
if path.endswith(".dist-info"):
name, version = (
os.path.basename(path)[:-len(".dist-info")]
).rsplit("-", 1)
# TODO: information about extras.
state.append(" ".join((name, version)))
return {"result": result, "state": sorted(state)}
def check_error(error, result):
return_code = error.get('code')
if return_code:
assert result.returncode == return_code
stderr = error.get('stderr')
if not stderr:
return
if isinstance(stderr, str):
patterns = [stderr]
elif isinstance(stderr, list):
patterns = stderr
else:
raise TypeError("string or list expected, found %r" % stderr)
for pattern in patterns:
match = re.search(pattern, result.stderr, re.I)
assert match, 'regex %r not found in stderr: %r' % (
pattern, result.stderr)
@pytest.mark.yaml
@pytest.mark.parametrize(
"case", generate_yaml_tests(DATA_DIR.parent / "yaml"), ids=id_func
)
def test_yaml_based(script, case):
available = case.get("available", [])
requests = case.get("request", [])
responses = case.get("response", [])
assert len(requests) == len(responses), (
"Expected requests and responses counts to be same"
)
# Create a custom index of all the packages that are supposed to be
# available
# XXX: This doesn't work because this isn't making an index of files.
for package in available:
if isinstance(package, str):
package = convert_to_dict(package)
assert isinstance(package, dict), "Needs to be a dictionary"
create_basic_wheel_for_package(script, **package)
# use scratch path for index
for request, response in zip(requests, responses):
for action in 'install', 'uninstall':
if action in request:
break
else:
raise f"Unsupported request {request!r}"
# Perform the requested action
effect = handle_request(script, action,
request[action],
request.get('options', '').split(),
resolver_variant=case[':resolver:'])
result = effect['result']
if 0:  # flip to 1 to dump pip's output to a file for easier analysis
with open(DATA_DIR.parent / "yaml" /
case[':name:'].replace('*', '-'), 'w') as fo:
fo.write("=== RETURNCODE = %d\n" % result.returncode)
fo.write("=== STDERR ===:\n%s\n" % result.stderr)
if 'state' in response:
assert effect['state'] == (response['state'] or []), str(result)
error = response.get('error')
if error and case[":resolver:"] == '2020-resolver' and sys.platform != 'win32':
# Note: we currently skip running this check on Windows, as the tests
# were failing there due to different error codes. There should be no
# reason not to run this check on Windows as well.
check_error(error, result)


@@ -1,60 +0,0 @@
# New resolver error messages
## Incompatible requirements
Most resolver error messages are due to incompatible requirements.
That is, the dependency tree contains conflicting versions of the same
package. Take the example:
base:
available:
- A 1.0.0; depends B == 1.0.0, C == 2.0.0
- B 1.0.0; depends C == 1.0.0
- C 1.0.0
- C 2.0.0
Here, `A` cannot be installed because it depends on `B` (which depends on
a different version of `C` than `A` itself does). In real-world examples, the
conflicting versions are not so easy to spot. I'm suggesting an error
message which looks something like this:
A 1.0.0 -> B 1.0.0 -> C 1.0.0
A 1.0.0 -> C 2.0.0
That is, for the conflicting package, we show the user where exactly the
requirement came from.
## Double requirement
I've noticed that in many cases the old resolver's messages are more
informative. Take this simple example:
base:
available:
- B 1.0.0
- B 2.0.0
Now if we want to install both versions of `B` at the same time,
i.e. the requirement `B==1.0.0 B==2.0.0`, we get:
ERROR: Could not find a version that satisfies the requirement B==1.0.0
ERROR: Could not find a version that satisfies the requirement B==2.0.0
No matching distribution found for b, b
This is even though both versions are actually available and each satisfies
its requirement, just not both at once. When trying to install a version of
`B` which does not exist, say the requirement `B==1.5.0`, you get the same
type of error message:
Could not find a version that satisfies the requirement B==1.5.0
No matching distribution found for b
For this case, the old error message was:
Could not find a version that satisfies the requirement B==1.5.0 (from versions: 1.0.0, 2.0.0)
No matching distribution found for B==1.5.0
And the old error message for the requirement `B==1.0.0 B==2.0.0`:
Double requirement given: B==2.0.0 (already in B==1.0.0, name='B')


@@ -1,74 +0,0 @@
# YAML tests for pip's resolver
This directory contains fixtures for testing pip's resolver.
The fixtures are written as `.yml` files, with a convenient format
that allows for specifying a custom index for temporary use.
The `.yml` files are typically organized in the following way. Here, we are
going to take a closer look at the `simple.yml` file and step through the
test cases. A `base` section defines which packages are available upstream:
base:
available:
- simple 0.1.0
- simple 0.2.0
- base 0.1.0; depends dep
- dep 0.1.0
Each package has a name and a version number. Here, the package `simple`
is available in two versions (`0.1.0` and `0.2.0`). The package
`base 0.1.0` depends on the requirement `dep` (which simply means it
depends on any version of `dep`). More generally, a package can also
depend on a specific version of another package, or on a range of versions.
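For example, a specification that constrains the versions of a package's
dependencies (this one is taken from another fixture removed in this PR)
looks like:

- A 1.0.0; depends B >= 1.0.0, C >= 1.0.0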
Next, in our yaml file, we have the `cases:` section which is a list of
test cases. Each test case has a request and a response. The request
is what the user would want to do:
cases:
-
request:
- install: simple
- uninstall: simple
response:
- state:
- simple 0.2.0
- state: null
Here, the first request is to install the package `simple`, which is
basically equivalent to typing `pip install simple`. The corresponding
first response is that the resulting state of installed packages is
`simple 0.2.0`. Note that, by default, the highest available version of a
package will be installed.
The second request is to uninstall simple again, which will result in the
state `null` (basically an empty list of installed packages).
When the yaml tests are run, each response is verified by checking which
packages actually got installed. Note that this check is done in
alphabetical order.
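For reference, the harness in tests/functional/test_yaml.py (also removed in
this PR) derives this state from the .dist-info directories in the test
environment's site-packages; a simplified sketch of the equivalent logic:

import os

def installed_state(site_packages_path):
    # Collect "name version" pairs from the *.dist-info directory names and
    # sort them, so the comparison against `state:` is alphabetical.
    state = []
    for path in os.listdir(site_packages_path):
        if path.endswith(".dist-info"):
            name, version = path[:-len(".dist-info")].rsplit("-", 1)
            state.append(f"{name} {version}")
    return sorted(state)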
The linter is very useful for initially checking `.yml` files, e.g.:
$ python linter.py -v simple.yml
To run only the yaml tests, use (from the root of the source tree):
$ tox -e py38 -- -m yaml -vv
Or, in order to avoid collecting all the test cases:
$ tox -e py38 -- tests/functional/test_yaml.py
Or, only a specific test:
$ tox -e py38 -- tests/functional/test_yaml.py -k simple
Or, just a specific test case:
$ tox -e py38 -- tests/functional/test_yaml.py -k simple-0
<!-- TODO: Add a good description of the format and how it can be used. -->


@@ -1,40 +0,0 @@
# Pradyun's backtracking example
base:
available:
- A 1.0.0; depends B == 1.0.0
- A 2.0.0; depends B == 2.0.0, C == 1.0.0
- A 3.0.0; depends B == 3.0.0, C == 2.0.0
- A 4.0.0; depends B == 4.0.0, C == 3.0.0
- A 5.0.0; depends B == 5.0.0, C == 4.0.0
- A 6.0.0; depends B == 6.0.0, C == 5.0.0
- A 7.0.0; depends B == 7.0.0, C == 6.0.0
- A 8.0.0; depends B == 8.0.0, C == 7.0.0
- B 1.0.0; depends C == 1.0.0
- B 2.0.0; depends C == 2.0.0
- B 3.0.0; depends C == 3.0.0
- B 4.0.0; depends C == 4.0.0
- B 5.0.0; depends C == 5.0.0
- B 6.0.0; depends C == 6.0.0
- B 7.0.0; depends C == 7.0.0
- B 8.0.0; depends C == 8.0.0
- C 1.0.0
- C 2.0.0
- C 3.0.0
- C 4.0.0
- C 5.0.0
- C 6.0.0
- C 7.0.0
- C 8.0.0
cases:
-
request:
- install: A
response:
- state:
- A 1.0.0
- B 1.0.0
- C 1.0.0
skip: legacy


@@ -1,45 +0,0 @@
base:
available:
- A 1.0.0; depends B == 1.0.0
- B 1.0.0; depends C == 1.0.0
- C 1.0.0; depends D == 1.0.0
- D 1.0.0; depends A == 1.0.0
cases:
# NOTE: Do we want to check the order?
-
request:
- install: A
response:
- state:
- A 1.0.0
- B 1.0.0
- C 1.0.0
- D 1.0.0
-
request:
- install: B
response:
- state:
- A 1.0.0
- B 1.0.0
- C 1.0.0
- D 1.0.0
-
request:
- install: C
response:
- state:
- A 1.0.0
- B 1.0.0
- C 1.0.0
- D 1.0.0
-
request:
- install: D
response:
- state:
- A 1.0.0
- B 1.0.0
- C 1.0.0
- D 1.0.0


@@ -1,77 +0,0 @@
base:
available:
- A 1.0.0; depends B == 1.0.0, B == 2.0.0
- B 1.0.0
- B 2.0.0
cases:
-
request:
- install: A
response:
- error:
code: 0
stderr: ['incompatible']
skip: legacy
# -- a good error message would be:
# A 1.0.0 has incompatible requirements B==1.0.0, B==2.0.0
-
request:
- install: ['B==1.0.0', 'B']
response:
- state:
- B 1.0.0
skip: legacy
# -- old error:
# Double requirement given: B (already in B==1.0.0, name='B')
-
request:
- install: ['B==1.0.0', 'B==2.0.0']
response:
- state: null
error:
code: 1
stderr: >-
Cannot install B==1.0.0 and B==2.0.0 because these
package versions have conflicting dependencies.
skip: legacy
# -- currently the (new resolver) error message is:
# Could not find a version that satisfies the requirement B==1.0.0
# Could not find a version that satisfies the requirement B==2.0.0
# No matching distribution found for b, b
# -- better would be:
# cannot install different version (1.0.0, 2.0.0) of package B at the
# same time.
# -- the old error message was actually better here:
# Double requirement given: B==2.0.0 (already in B==1.0.0, name='B')
-
request:
- install: B==1.5.0
response:
- state: null
error:
code: 1
stderr: 'no\s+matching\s+distribution'
skip: legacy
# -- currently (new resolver) error message is:
# Could not find a version that satisfies the requirement B==1.5.0
# No matching distribution found for b
# -- the old error message was actually better here:
# Could not find a version that satisfies the requirement B==1.5.0 (from versions: 1.0.0, 2.0.0)
# No matching distribution found for B==1.5.0
-
request:
- install: A==2.0
response:
- state: null
error:
code: 1
stderr: 'no\s+matching\s+distribution'
skip: legacy
# -- currently the error message is:
# Could not find a version that satisfies the requirement A==2.0
# No matching distribution found for a


@@ -1,28 +0,0 @@
# Tzu-ping mentioned this example
base:
available:
- name: virtualenv
version: 20.0.2
depends: ['six>=1.12.0,<2']
- six 1.11
- six 1.12
- six 1.13
cases:
-
request:
- install: virtualenv
response:
- state:
- six 1.13
- virtualenv 20.0.2
-
request:
- install: ['six<1.12', 'virtualenv==20.0.2']
response:
- state: null
error:
stderr: >-
Cannot install six<1.12 and virtualenv 20.0.2 because these
package versions have conflicting dependencies.
skip: legacy


@@ -1,22 +0,0 @@
base:
available:
- A 1.0.0; depends B == 1.0.0, C == 2.0.0
- B 1.0.0; depends C == 1.0.0
- C 1.0.0
- C 2.0.0
cases:
-
request:
- install: A
response:
- state: null
skip: legacy
# -- currently the error message is:
# Could not find a version that satisfies the requirement C==2.0.0 (from a)
# Could not find a version that satisfies the requirement C==1.0.0 (from b)
# No matching distribution found for c, c
# -- This is a bit confusing, as both versions of C are available.
# -- better would be something like:
# A 1.0.0 -> B 1.0.0 -> C 1.0.0
# A 1.0.0 -> C 2.0.0


@@ -1,19 +0,0 @@
cases:
-
available:
- A 1.0.0; depends B == 1.0.0, C == 1.0.0
- B 1.0.0; depends D == 1.0.0
- C 1.0.0; depends D == 2.0.0
- D 1.0.0
- D 2.0.0
request:
- install: A
response:
- error:
code: 1
stderr: >-
Cannot install A and A because these package
versions have conflicting dependencies.
# TODO: Tweak this error message to make sense.
# https://github.com/pypa/pip/issues/8495
skip: legacy


@@ -1,18 +0,0 @@
cases:
-
available:
- A 1.0.0; depends C == 1.0.0
- B 1.0.0; depends C == 2.0.0
- C 1.0.0
- C 2.0.0
request:
- install: A
- install: B
response:
- state:
- A 1.0.0
- C 1.0.0
- error:
code: 0
stderr: ['c==1\.0\.0', 'incompatible']
skip: legacy


@@ -1,49 +0,0 @@
base:
available:
- A 1.0.0; depends B == 1.0.0, C == 1.0.0, D == 1.0.0
- B 1.0.0; depends D[extra_1] == 1.0.0
- C 1.0.0; depends D[extra_2] == 1.0.0
- name: D
version: 1.0.0
depends: []
extras:
extra_1: [E == 1.0.0]
extra_2: [F == 1.0.0]
- E 1.0.0
- F 1.0.0
cases:
-
request:
- install: B
response:
- state:
- B 1.0.0
- D 1.0.0
- E 1.0.0
-
request:
- install: C
response:
- state:
- C 1.0.0
- D 1.0.0
- F 1.0.0
-
request:
- install: A
response:
- state:
- A 1.0.0
- B 1.0.0
- C 1.0.0
- D 1.0.0
- E 1.0.0
- F 1.0.0
skip: legacy
-
request:
- install: D[extra_1]
options: --no-deps
response:
- state:
- D 1.0.0


@@ -1,20 +0,0 @@
base:
available:
- A 1.0.0; depends B == 1.0.0, C == 1.0.0
- A 0.8.0
- B 1.0.0; depends D == 1.0.0
- C 1.0.0; depends D == 2.0.0
- D 1.0.0
- D 2.0.0
cases:
-
request:
- install: A
response:
- state:
- A 0.8.0
# the old resolver tries to install A 1.0.0 (which fails), but the new
# resolver realises that A 1.0.0 cannot be installed and falls back to
# installing the older version A 0.8.0 instead.
skip: legacy

File diff suppressed because it is too large.


@@ -1,295 +0,0 @@
# The 129 available packages have been obtained by transforming a
# conda repodata.json, and doing some manual fixes.
base:
available:
- affine 2.2.0
- affine 2.2.1
- asn1crypto 0.22.0
- asn1crypto 0.23.0
- asn1crypto 0.24.0
- backports 1.0
- name: backports.functools_lru_cache
version: '1.4'
depends: ['backports', 'setuptools']
- name: backports.functools_lru_cache
version: '1.5'
depends: ['backports', 'setuptools']
- beautifulsoup4 4.6.0
- beautifulsoup4 4.6.1
- beautifulsoup4 4.6.3
- name: cachecontrol
version: 0.12.3
depends: ['msgpack_python', 'requests']
- name: cachecontrol
version: 0.12.4
depends: ['msgpack_python', 'requests']
- name: cachecontrol
version: 0.12.5
depends: ['msgpack_python', 'requests']
- certifi 2017.11.5
- certifi 2017.7.27.1
- certifi 2018.1.18
- certifi 2018.4.16
- certifi 2018.8.13
# cffi is a bundled module in PyPy and causes resolution errors if pip
# tries to install it. Give it a different name since we are simply
# checking the graph anyway and the identifier doesn't really matter.
- name: cffi_not_really
version: 1.10.0
depends: ['pycparser']
- name: cffi_not_really
version: 1.11.2
depends: ['pycparser']
- name: cffi_not_really
version: 1.11.4
depends: ['pycparser']
- name: cffi_not_really
version: 1.11.5
depends: ['pycparser']
- chardet 3.0.4
- click 6.7
- colorama 0.3.9
- colour 0.1.4
- colour 0.1.5
- contextlib2 0.5.5
- name: cryptography
version: 2.0.3
depends: ['asn1crypto >=0.21.0', 'cffi_not_really >=1.7', 'idna >=2.1', 'six >=1.4.1']
- name: cryptography
version: 2.1.3
depends: ['asn1crypto >=0.21.0', 'cffi_not_really >=1.7', 'idna >=2.1', 'six >=1.4.1']
- name: cryptography
version: 2.1.4
depends: ['asn1crypto >=0.21.0', 'cffi_not_really >=1.7', 'idna >=2.1', 'six >=1.4.1']
- name: cryptography
version: 2.2.1
depends: ['asn1crypto >=0.21.0', 'cffi_not_really >=1.7', 'idna >=2.1', 'six >=1.4.1']
- name: cryptography
version: '2.3'
depends: ['asn1crypto >=0.21.0', 'cffi_not_really >=1.7', 'cryptography_vectors ~=2.3', 'idna >=2.1', 'six >=1.4.1']
- cryptography_vectors 2.0.3
- cryptography_vectors 2.1.3
- cryptography_vectors 2.1.4
- cryptography_vectors 2.2.1
- cryptography_vectors 2.2.2
- cryptography_vectors 2.3.0
- name: cytoolz
version: 0.8.2
depends: ['toolz >=0.8.0']
- name: cytoolz
version: 0.9.0
depends: ['toolz >=0.8.0']
- name: cytoolz
version: 0.9.0.1
depends: ['toolz >=0.8.0']
- distlib 0.2.5
- distlib 0.2.6
- distlib 0.2.7
- enum34 1.1.6
- filelock 2.0.12
- filelock 2.0.13
- filelock 3.0.4
- future 0.16.0
- futures 3.1.1
- futures 3.2.0
- glob2 0.5
- glob2 0.6
- name: html5lib
version: '0.999999999'
depends: ['six >=1.9', 'webencodings']
- name: html5lib
version: 1.0.1
depends: ['six >=1.9', 'webencodings']
- idna 2.6
- idna 2.7
- ipaddress 1.0.18
- ipaddress 1.0.19
- ipaddress 1.0.22
- name: jinja2
version: '2.10'
depends: ['markupsafe >=0.23', 'setuptools']
- name: jinja2
version: 2.9.6
depends: ['markupsafe >=0.23', 'setuptools']
- lockfile 0.12.2
- markupsafe 1.0
- msgpack_python 0.4.8
- msgpack_python 0.5.1
- msgpack_python 0.5.5
- msgpack_python 0.5.6
- name: packaging
version: '16.8'
depends: ['pyparsing', 'six']
- name: packaging
version: '17.1'
depends: ['pyparsing', 'six']
- name: pip
version: 10.0.1
depends: ['setuptools', 'wheel']
- name: pip
version: 9.0.1
depends: ['cachecontrol', 'colorama', 'distlib', 'html5lib', 'lockfile', 'packaging', 'progress', 'requests', 'setuptools', 'webencodings', 'wheel']
- name: pip
version: 9.0.3
depends: ['setuptools', 'wheel']
- pkginfo 1.4.1
- pkginfo 1.4.2
- progress 1.3
- progress 1.4
- psutil 5.2.2
- psutil 5.3.1
- psutil 5.4.0
- psutil 5.4.1
- psutil 5.4.3
- psutil 5.4.5
- psutil 5.4.6
- pycosat 0.6.2
- pycosat 0.6.3
- pycparser 2.18
- name: pyopenssl
version: 17.2.0
depends: ['cryptography >=1.9', 'six >=1.5.2']
- name: pyopenssl
version: 17.4.0
depends: ['cryptography >=1.9', 'six >=1.5.2']
- name: pyopenssl
version: 17.5.0
depends: ['cryptography >=2.1.4', 'six >=1.5.2']
- name: pyopenssl
version: 18.0.0
depends: ['cryptography >=2.2.1', 'six >=1.5.2']
- pyparsing 2.2.0
- name: pysocks
version: 1.6.7
depends: ['win_inet_pton']
- name: pysocks
version: 1.6.8
depends: ['win_inet_pton']
- pywin32 221
- pywin32 222
- pywin32 223
- pyyaml 3.12
- pyyaml 3.13
- name: requests
version: 2.18.4
depends: ['certifi >=2017.4.17', 'chardet >=3.0.2,<3.1.0', 'idna >=2.5,<2.7', 'urllib3 >=1.21.1,<1.23']
- name: requests
version: 2.19.1
depends: ['certifi >=2017.4.17', 'chardet >=3.0.2,<3.1.0', 'idna >=2.5,<2.8', 'urllib3 >=1.21.1,<1.24']
- scandir 1.5
- scandir 1.6
- scandir 1.7
- scandir 1.8
- scandir 1.9.0
- name: setuptools
version: 36.2.2
depends: ['certifi', 'wincertstore']
- name: setuptools
version: 36.5.0
depends: ['certifi', 'wincertstore']
- name: setuptools
version: 38.4.0
depends: ['certifi >=2016.09', 'wincertstore >=0.2']
- name: setuptools
version: 38.5.1
depends: ['certifi >=2016.09', 'wincertstore >=0.2']
- name: setuptools
version: 39.0.1
depends: ['certifi >=2016.09', 'wincertstore >=0.2']
- name: setuptools
version: 39.1.0
depends: ['certifi >=2016.09', 'wincertstore >=0.2']
- name: setuptools
version: 39.2.0
depends: ['certifi >=2016.09', 'wincertstore >=0.2']
- name: setuptools
version: 40.0.0
depends: ['certifi >=2016.09', 'wincertstore >=0.2']
- six 1.8.2
- six 1.10.0
- six 1.11.0
- toolz 0.8.2
- toolz 0.9.0
- name: urllib3
version: '1.22'
depends: ['certifi', 'cryptography >=1.3.4', 'idna >=2.0.0', 'pyopenssl >=0.14', 'pysocks >=1.5.6,<2.0,!=1.5.7']
- name: urllib3
version: '1.23'
depends: ['certifi', 'cryptography >=1.3.4', 'idna >=2.0.0', 'pyopenssl >=0.14', 'pysocks >=1.5.6,<2.0,!=1.5.7']
- webencodings 0.5.1
- name: wheel
version: 0.29.0
depends: ['setuptools']
- name: wheel
version: 0.30.0
depends: ['setuptools']
- name: wheel
version: 0.31.0
depends: ['setuptools']
- name: wheel
version: 0.31.1
depends: ['setuptools']
- win_inet_pton 1.0.1
- wincertstore 0.2
cases:
-
request:
- install: affine
response:
- state:
- affine 2.2.1
-
request:
- install: cryptography
response:
- state:
- asn1crypto 0.24.0
- cffi_not_really 1.11.5
- cryptography 2.3
- cryptography_vectors 2.3.0
- idna 2.7
- pycparser 2.18
- six 1.11.0
skip: legacy
-
request:
- install: cachecontrol
response:
- state:
- asn1crypto 0.24.0
- cachecontrol 0.12.5
- certifi 2018.8.13
- cffi_not_really 1.11.5
- chardet 3.0.4
- cryptography 2.3
- cryptography_vectors 2.3.0
- idna 2.7
- msgpack_python 0.5.6
- pycparser 2.18
- pyopenssl 18.0.0
- pysocks 1.6.8
- requests 2.19.1
- six 1.11.0
- urllib3 1.23
- win_inet_pton 1.0.1
-
request:
- install: cytoolz
response:
- state:
- cytoolz 0.9.0.1
- toolz 0.9.0
-
request:
- install: ['html5lib', 'six ==1.8.2']
response:
- state: null
error:
code: 1
stderr: >-
Cannot install six==1.8.2, html5lib 1.0.1, six==1.8.2 and
html5lib 0.999999999 because these package versions have
conflicting dependencies.
skip: legacy


@@ -1,108 +0,0 @@
import re
import sys
from pprint import pprint
import yaml
sys.path.insert(0, '../../src')
sys.path.insert(0, '../..')
def check_dict(d, required=None, optional=None):
assert isinstance(d, dict)
if required is None:
required = []
if optional is None:
optional = []
for key in required:
if key not in d:
sys.exit("key %r is required" % key)
allowed_keys = set(required)
allowed_keys.update(optional)
for key in d.keys():
if key not in allowed_keys:
sys.exit("key %r is not allowed. Allowed keys are: %r" %
(key, allowed_keys))
def lint_case(case, verbose=False):
from tests.functional.test_yaml import convert_to_dict
if verbose:
print("--- linting case ---")
pprint(case)
check_dict(case, optional=['available', 'request', 'response', 'skip'])
available = case.get("available", [])
requests = case.get("request", [])
responses = case.get("response", [])
assert isinstance(available, list)
assert isinstance(requests, list)
assert isinstance(responses, list)
assert len(requests) == len(responses)
for package in available:
if isinstance(package, str):
package = convert_to_dict(package)
if verbose:
pprint(package)
check_dict(package,
required=['name', 'version'],
optional=['depends', 'extras'])
version = package['version']
assert isinstance(version, str), repr(version)
for request, response in zip(requests, responses):
check_dict(request, optional=['install', 'uninstall', 'options'])
check_dict(response, optional=['state', 'error'])
assert len(response) >= 1
assert isinstance(response.get('state') or [], list)
error = response.get('error')
if error:
check_dict(error, optional=['code', 'stderr'])
stderr = error.get('stderr')
if stderr:
if isinstance(stderr, str):
patterns = [stderr]
elif isinstance(stderr, list):
patterns = stderr
else:
raise TypeError("string or list expected, found %r" % stderr)
for pattern in patterns:
re.compile(pattern, re.I)
def lint_yml(yml_file, verbose=False):
if verbose:
print("=== linting: %s ===" % yml_file)
assert yml_file.endswith(".yml")
with open(yml_file) as fi:
data = yaml.safe_load(fi)
if verbose:
pprint(data)
check_dict(data, required=['cases'], optional=['base'])
base = data.get("base", {})
cases = data["cases"]
for case_template in cases:
case = base.copy()
case.update(case_template)
lint_case(case, verbose)
if __name__ == '__main__':
from optparse import OptionParser
p = OptionParser(usage="usage: %prog [options] FILE ...",
description="linter for pip's yaml test FILE(s)")
p.add_option('-v', '--verbose',
action="store_true")
opts, args = p.parse_args()
if len(args) < 1:
p.error('at least one argument required, try -h')
for yml_file in args:
lint_yml(yml_file, opts.verbose)


@@ -1,24 +0,0 @@
base:
available:
- A 1.0.0; depends B < 2.0.0
- A 2.0.0; depends B < 3.0.0
- B 1.0.0
- B 2.0.0
- B 2.1.0
- B 3.0.0
cases:
-
request:
- install: A >= 2.0.0
response:
- state:
- A 2.0.0
- B 2.1.0
-
request:
- install: A < 2.0.0
response:
- state:
- A 1.0.0
- B 1.0.0


@@ -1,44 +0,0 @@
# https://medium.com/knerd/the-nine-circles-of-python-dependency-hell-481d53e3e025
# Circle 4: Overlapping transitive dependencies
base:
available:
- myapp 0.2.4; depends fussy, capridous
- name: fussy
version: 3.8.0
depends: ['requests >=1.2.0,<3']
- name: capridous
version: 1.1.0
depends: ['requests >=1.0.3,<2']
- requests 1.0.1
- requests 1.0.3
- requests 1.1.0
- requests 1.2.0
- requests 1.3.0
- requests 2.1.0
- requests 3.2.0
cases:
-
request:
- install: myapp
response:
- state:
- capridous 1.1.0
- fussy 3.8.0
- myapp 0.2.4
- requests 1.3.0
skip: legacy
-
request:
- install: fussy
response:
- state:
- fussy 3.8.0
- requests 2.1.0
-
request:
- install: capridous
response:
- state:
- capridous 1.1.0
- requests 1.3.0


@@ -1,29 +0,0 @@
base:
available:
- A 1.0.0
- A 2.0.0
- B 1.0.0; depends A == 1.0.0
- B 2.0.0; depends A == 2.0.0
cases:
-
request:
- install: B
response:
- state:
- A 2.0.0
- B 2.0.0
-
request:
- install: B == 2.0.0
response:
- state:
- A 2.0.0
- B 2.0.0
-
request:
- install: B == 1.0.0
response:
- state:
- A 1.0.0
- B 1.0.0


@@ -1,37 +0,0 @@
# https://github.com/pypa/pip/issues/988#issuecomment-606967707
base:
available:
- A 1.0.0; depends B >= 1.0.0, C >= 1.0.0
- A 2.0.0; depends B >= 2.0.0, C >= 1.0.0
- B 1.0.0; depends C >= 1.0.0
- B 2.0.0; depends C >= 2.0.0
- C 1.0.0
- C 2.0.0
cases:
-
request:
- install: C==1.0.0
- install: B==1.0.0
- install: A==1.0.0
- install: A==2.0.0
response:
- state:
- C 1.0.0
- state:
- B 1.0.0
- C 1.0.0
- state:
- A 1.0.0
- B 1.0.0
- C 1.0.0
- state:
- A 2.0.0
- B 2.0.0
- C 2.0.0
# for the last install (A==2.0.0) the old resolver gives
# - A 2.0.0
# - B 2.0.0
# - C 1.0.0
# but because B 2.0.0 depends on C >=2.0.0 this is wrong
skip: legacy


@@ -1,24 +0,0 @@
# see: https://github.com/python-poetry/poetry/issues/2298
base:
available:
- poetry 1.0.5; depends zappa == 0.51.0, sphinx == 3.0.1
- zappa 0.51.0; depends boto3
- sphinx 3.0.1; depends docutils
- boto3 1.4.5; depends botocore ~=1.5.0
- botocore 1.5.92; depends docutils <0.16
- docutils 0.16.0
- docutils 0.15.0
cases:
-
request:
- install: poetry
response:
- state:
- boto3 1.4.5
- botocore 1.5.92
- docutils 0.15.0
- poetry 1.0.5
- sphinx 3.0.1
- zappa 0.51.0
skip: legacy


@@ -1,47 +0,0 @@
base:
available:
- simple 0.1.0
- simple 0.2.0
- base 0.1.0; depends dep
- dep 0.1.0
cases:
-
request:
- install: simple
- uninstall: simple
response:
- state:
- simple 0.2.0
- state: null
-
request:
- install: simple
- install: dep
response:
- state:
- simple 0.2.0
- state:
- dep 0.1.0
- simple 0.2.0
-
request:
- install: base
response:
- state:
- base 0.1.0
- dep 0.1.0
-
request:
- install: base
options: --no-deps
response:
- state:
- base 0.1.0
-
request:
- install: ['dep', 'simple==0.1.0']
response:
- state:
- dep 0.1.0
- simple 0.1.0


@@ -1,24 +0,0 @@
base:
available:
- a 0.1.0
- b 0.2.0
- c 0.3.0
cases:
-
request:
- install: ['a', 'b']
- install: c
- uninstall: ['b', 'c']
- uninstall: a
response:
- state:
- a 0.1.0
- b 0.2.0
- state:
- a 0.1.0
- b 0.2.0
- c 0.3.0
- state:
- a 0.1.0
- state: null


@@ -6,7 +6,6 @@ pytest
pytest-cov
pytest-rerunfailures
pytest-xdist
pyyaml
scripttest
setuptools>=39.2.0 # Needed for `setuptools.wheel.Wheel` support.
https://github.com/pypa/virtualenv/archive/legacy.zip#egg=virtualenv