Merge branch 'master' into nr_constraints2

Paul Moore 2020-05-05 11:41:09 +01:00
commit 8664218164
28 changed files with 872 additions and 115 deletions

View File

@ -565,7 +565,7 @@ While this cache attempts to minimize network activity, it does not prevent
network access altogether. If you want a local install solution that
circumvents accessing PyPI, see :ref:`Installing from local packages`.
The default location for the cache directory depends on the Operating System:
The default location for the cache directory depends on the operating system:
Unix
:file:`~/.cache/pip` and it respects the ``XDG_CACHE_HOME`` directory.
@ -574,6 +574,9 @@ macOS
Windows
:file:`<CSIDL_LOCAL_APPDATA>\\pip\\Cache`
Run ``pip cache dir`` to show the cache directory and see :ref:`pip cache` to
inspect and manage pip's cache.
.. _`Wheel cache`:

1
news/8164.bugfix Normal file
View File

@ -0,0 +1 @@
Fix metadata permission issues when umask has the executable bit set.

View File

@ -567,7 +567,7 @@ def install_unpacked_wheel(
if msg is not None:
logger.warning(msg)
generated_file_mode = 0o666 - current_umask()
generated_file_mode = 0o666 & ~current_umask()
@contextlib.contextmanager
def _generate_file(path, **kwargs):
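A quick illustration (not part of the diff) of why the fix switches from subtraction to a bitwise mask: subtraction borrows across octal digits whenever the umask has bits set that 0o666 does not, such as the executable bits, and produces a nonsense mode. The umask value below matches the new unit test added later in this commit.
umask = 0o027                  # group: no write, others: no access
wrong = 0o666 - umask          # 0o637 -- octal borrow, wrong permissions
right = 0o666 & ~umask         # 0o640 -- what the fixed code now computes
print(oct(wrong), oct(right))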

View File

@ -41,8 +41,12 @@ logger = logging.getLogger(__name__)
def make_install_req_from_link(link, parent):
# type: (Link, InstallRequirement) -> InstallRequirement
assert not parent.editable, "parent is editable"
return install_req_from_line(
link.url,
if parent.req:
line = str(parent.req)
else:
line = link.url
ireq = install_req_from_line(
line,
comes_from=parent.comes_from,
use_pep517=parent.use_pep517,
isolated=parent.isolated,
@ -53,6 +57,10 @@ def make_install_req_from_link(link, parent):
hashes=parent.hash_options
),
)
if ireq.link is None:
ireq.link = link
# TODO: Handle wheel cache resolution.
return ireq
def make_install_req_from_editable(link, parent):
@ -405,6 +413,8 @@ class ExtrasCandidate(Candidate):
]
# Add a dependency on the exact base.
# (See note 2b in the class docstring)
# FIXME: This does not work if the base candidate is specified by
# link, e.g. "pip install .[dev]" will fail.
spec = "{}=={}".format(self.base.name, self.base.version)
deps.append(factory.make_requirement_from_spec(spec, self.base._ireq))
return deps

View File

@ -42,6 +42,8 @@ if MYPY_CHECK_RUNNING:
class Factory(object):
_allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"}
def __init__(
self,
finder, # type: PackageFinder
@ -50,15 +52,21 @@ class Factory(object):
force_reinstall, # type: bool
ignore_installed, # type: bool
ignore_requires_python, # type: bool
upgrade_strategy, # type: str
py_version_info=None, # type: Optional[Tuple[int, ...]]
):
# type: (...) -> None
assert upgrade_strategy in self._allowed_strategies
self.finder = finder
self.preparer = preparer
self._python_candidate = RequiresPythonCandidate(py_version_info)
self._make_install_req_from_spec = make_install_req
self._force_reinstall = force_reinstall
self._ignore_requires_python = ignore_requires_python
self._upgrade_strategy = upgrade_strategy
self.root_reqs = set() # type: Set[str]
self._link_candidate_cache = {} # type: Cache[LinkCandidate]
self._editable_candidate_cache = {} # type: Cache[EditableCandidate]
@ -112,13 +120,23 @@ class Factory(object):
return ExtrasCandidate(base, extras)
return base
def _eligible_for_upgrade(self, dist_name):
# type: (str) -> bool
if self._upgrade_strategy == "eager":
return True
elif self._upgrade_strategy == "only-if-needed":
return (dist_name in self.root_reqs)
return False
def iter_found_candidates(self, ireq, extras):
# type: (InstallRequirement, Set[str]) -> Iterator[Candidate]
name = canonicalize_name(ireq.req.name)
if not self._force_reinstall:
installed_dist = self._installed_dists.get(name)
can_upgrade = self._eligible_for_upgrade(name)
else:
installed_dist = None
can_upgrade = False
found = self.finder.find_best_candidate(
project_name=ireq.req.name,
@ -128,6 +146,12 @@ class Factory(object):
for ican in found.iter_applicable():
if (installed_dist is not None and
installed_dist.parsed_version == ican.version):
if can_upgrade:
yield self._make_candidate_from_dist(
dist=installed_dist,
extras=extras,
parent=ireq,
)
continue
yield self._make_candidate_from_link(
link=ican.link,
@ -140,6 +164,7 @@ class Factory(object):
# Return installed distribution if it matches the specifier. This is
# done last so the resolver will prefer it over downloading links.
if (installed_dist is not None and
not can_upgrade and
installed_dist.parsed_version in ireq.req.specifier):
yield self._make_candidate_from_dist(
dist=installed_dist,
@ -149,12 +174,15 @@ class Factory(object):
def make_requirement_from_install_req(self, ireq):
# type: (InstallRequirement) -> Requirement
if ireq.is_direct and ireq.name:
self.root_reqs.add(canonicalize_name(ireq.name))
if ireq.link:
# TODO: Get name and version from ireq, if possible?
# Specifically, this might be needed in "name @ URL"
# syntax - need to check where that syntax is handled.
cand = self._make_candidate_from_link(
ireq.link, extras=set(), parent=ireq,
ireq.link, extras=set(ireq.extras), parent=ireq,
)
return ExplicitRequirement(cand, factory=self)
return SpecifierRequirement(ireq, factory=self)
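For orientation, a sketch (not part of the diff) of how the three allowed strategies behave once `_eligible_for_upgrade` is wired into ``iter_found_candidates`` above:
# "eager":           every resolved name is eligible for upgrade
# "only-if-needed":  only names in self.root_reqs (requirements the user
#                    asked for directly) are eligible
# "to-satisfy-only": nothing is eligible; an installed distribution that
#                    already satisfies the specifier is kept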

View File

@ -16,10 +16,11 @@ from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from .factory import Factory
if MYPY_CHECK_RUNNING:
from typing import Dict, List, Optional, Tuple
from typing import Dict, List, Optional, Set, Tuple
from pip._vendor.packaging.specifiers import SpecifierSet
from pip._vendor.resolvelib.resolvers import Result
from pip._vendor.resolvelib.structs import Graph
from pip._internal.cache import WheelCache
from pip._internal.index.package_finder import PackageFinder
@ -54,6 +55,7 @@ class Resolver(BaseResolver):
force_reinstall=force_reinstall,
ignore_installed=ignore_installed,
ignore_requires_python=ignore_requires_python,
upgrade_strategy=upgrade_strategy,
py_version_info=py_version_info,
)
self.ignore_dependencies = ignore_dependencies
@ -62,6 +64,13 @@ class Resolver(BaseResolver):
def resolve(self, root_reqs, check_supported_wheels):
# type: (List[InstallRequirement], bool) -> RequirementSet
# The factory should not have retained state from any previous usage.
# In theory this could only happen if self was reused to do a second
# resolve, which isn't something we do at the moment. We assert here
# in order to catch the issue if that ever changes.
# The persistent state that we care about is `root_reqs`.
assert len(self.factory.root_reqs) == 0, "Factory is being re-used"
constraints = defaultdict(list) # type: Dict[str,List[SpecifierSet]]
requirements = []
for req in root_reqs:
@ -123,42 +132,21 @@ class Resolver(BaseResolver):
def get_installation_order(self, req_set):
# type: (RequirementSet) -> List[InstallRequirement]
"""Create a list that orders given requirements for installation.
"""Get order for installation of requirements in RequirementSet.
The returned list should contain all requirements in ``req_set``,
so the caller can loop through it and have a requirement installed
before the requiring thing.
The returned list contains each requirement before the requirements that
depend on it. This helps ensure that the environment is kept consistent
as the requirements get installed one-by-one.
The current implementation walks the resolved dependency graph, and
makes sure every node has a greater "weight" than all its parents.
The current implementation creates a topological ordering of the
dependency graph, breaking any cycles in the graph at arbitrary
points. We make no guarantee about where the cycles are broken, only
that they will be.
"""
assert self._result is not None, "must call resolve() first"
weights = {} # type: Dict[Optional[str], int]
graph = self._result.graph
key_count = len(self._result.mapping) + 1 # Packages plus sentinel.
while len(weights) < key_count:
progressed = False
for key in graph:
if key in weights:
continue
parents = list(graph.iter_parents(key))
if not all(p in weights for p in parents):
continue
if parents:
weight = max(weights[p] for p in parents) + 1
else:
weight = 0
weights[key] = weight
progressed = True
# FIXME: This check will fail if there are unbreakable cycles.
# Implement something to forcefully break them up to continue.
if not progressed:
raise InstallationError(
"Could not determine installation order due to cicular "
"dependency."
)
weights = get_topological_weights(graph)
sorted_items = sorted(
req_set.requirements.items(),
@ -168,6 +156,52 @@ class Resolver(BaseResolver):
return [ireq for _, ireq in sorted_items]
def get_topological_weights(graph):
# type: (Graph) -> Dict[Optional[str], int]
"""Assign weights to each node based on how "deep" they are.
This implementation may change at any point in the future without prior
notice.
We take the length of the longest path to any node from the root, ignoring
any paths that contain a single node twice (i.e. cycles). This is done
through a depth-first search through the graph, while keeping track of the
path to the node.
Cycles in the graph would result in a node being revisited while it is
already on the current path. In that case we take no action, which ensures
we don't get stuck in a cycle.
When assigning weight, the longer path (i.e. larger length) is preferred.
"""
path = set() # type: Set[Optional[str]]
weights = {} # type: Dict[Optional[str], int]
def visit(node):
# type: (Optional[str]) -> None
if node in path:
# We hit a cycle, so we'll break it here.
return
# Time to visit the children!
path.add(node)
for child in graph.iter_children(node):
visit(child)
path.remove(node)
last_known_parent_count = weights.get(node, 0)
weights[node] = max(last_known_parent_count, len(path))
# `None` is guaranteed to be the root node by resolvelib.
visit(None)
# Sanity checks
assert weights[None] == 0
assert len(weights) == len(graph)
return weights
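# Worked example (illustration only, not part of this change), using the
# same graph-building convention as the new unit tests:
#
#   edges: (None, "one"), ("one", "two"), (None, "two")
#   get_topological_weights(graph) == {None: 0, "one": 1, "two": 2}
#
# "two" keeps the weight from the longer path through "one"; the shorter
# direct edge from the root does not lower it. The "deep second edge"
# case in the new unit tests exercises the same behaviour on a larger graph.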
def _req_set_item_sorter(
item, # type: Tuple[str, InstallRequirement]
weights, # type: Dict[Optional[str], int]

View File

@ -126,3 +126,32 @@ def test_install_special_extra(script):
assert (
"Could not find a version that satisfies the requirement missing_pkg"
) in result.stderr, str(result)
@pytest.mark.parametrize(
"extra_to_install, simple_version", [
['', '3.0'],
pytest.param('[extra1]', '2.0', marks=pytest.mark.xfail),
pytest.param('[extra2]', '1.0', marks=pytest.mark.xfail),
pytest.param('[extra1,extra2]', '1.0', marks=pytest.mark.xfail),
])
def test_install_extra_merging(script, data, extra_to_install, simple_version):
# Check that extra specifications in the extras section are honoured.
pkga_path = script.scratch_path / 'pkga'
pkga_path.mkdir()
pkga_path.joinpath("setup.py").write_text(textwrap.dedent("""
from setuptools import setup
setup(name='pkga',
version='0.1',
install_requires=['simple'],
extras_require={'extra1': ['simple<3'],
'extra2': ['simple==1.*']},
)
"""))
result = script.pip_install_local(
'{pkga_path}{extra_to_install}'.format(**locals()),
)
assert ('Successfully installed pkga-0.1 simple-{}'.format(simple_version)
) in result.stdout

View File

@ -582,3 +582,165 @@ def test_new_resolver_constraint_on_path(script):
msg = "installation from path or url cannot be constrained to a version"
assert msg in result.stderr, str(result)
def test_new_resolver_upgrade_needs_option(script):
# Install pkg 1.0.0
create_basic_wheel_for_package(script, "pkg", "1.0.0")
script.pip(
"install", "--unstable-feature=resolver",
"--no-cache-dir", "--no-index",
"--find-links", script.scratch_path,
"pkg",
)
# Now release a new version
create_basic_wheel_for_package(script, "pkg", "2.0.0")
# This should not upgrade because we don't specify --upgrade
result = script.pip(
"install", "--unstable-feature=resolver",
"--no-cache-dir", "--no-index",
"--find-links", script.scratch_path,
"pkg",
)
assert "Requirement already satisfied" in result.stdout, str(result)
assert_installed(script, pkg="1.0.0")
# This should upgrade
result = script.pip(
"install", "--unstable-feature=resolver",
"--no-cache-dir", "--no-index",
"--find-links", script.scratch_path,
"--upgrade",
"PKG", # Deliberately uppercase to check canonicalization
)
assert "Uninstalling pkg-1.0.0" in result.stdout, str(result)
assert "Successfully uninstalled pkg-1.0.0" in result.stdout, str(result)
assert script.site_packages / "pkg" in result.files_updated, (
"pkg not upgraded"
)
assert_installed(script, pkg="2.0.0")
def test_new_resolver_upgrade_strategy(script):
create_basic_wheel_for_package(script, "base", "1.0.0", depends=["dep"])
create_basic_wheel_for_package(script, "dep", "1.0.0")
script.pip(
"install", "--unstable-feature=resolver",
"--no-cache-dir", "--no-index",
"--find-links", script.scratch_path,
"base",
)
assert_installed(script, base="1.0.0")
assert_installed(script, dep="1.0.0")
# Now release new versions
create_basic_wheel_for_package(script, "base", "2.0.0", depends=["dep"])
create_basic_wheel_for_package(script, "dep", "2.0.0")
script.pip(
"install", "--unstable-feature=resolver",
"--no-cache-dir", "--no-index",
"--find-links", script.scratch_path,
"--upgrade",
"base",
)
# With upgrade strategy "only-if-needed" (the default), dep should not
# be upgraded.
assert_installed(script, base="2.0.0")
assert_installed(script, dep="1.0.0")
create_basic_wheel_for_package(script, "base", "3.0.0", depends=["dep"])
script.pip(
"install", "--unstable-feature=resolver",
"--no-cache-dir", "--no-index",
"--find-links", script.scratch_path,
"--upgrade", "--upgrade-strategy=eager",
"base",
)
# With upgrade strategy "eager", dep should be upgraded.
assert_installed(script, base="3.0.0")
assert_installed(script, dep="2.0.0")
class TestExtraMerge(object):
"""
Test installing a package that depends on the same package with different
extras, one listed as required and the other in an extra.
"""
def _local_with_setup(script, name, version, requires, extras):
"""Create the package as a local source directory to install from path.
"""
return create_test_package_with_setup(
script,
name=name,
version=version,
install_requires=requires,
extras_require=extras,
)
def _direct_wheel(script, name, version, requires, extras):
"""Create the package as a wheel to install from path directly.
"""
return create_basic_wheel_for_package(
script,
name=name,
version=version,
depends=requires,
extras=extras,
)
def _wheel_from_index(script, name, version, requires, extras):
"""Create the package as a wheel to install from index.
"""
create_basic_wheel_for_package(
script,
name=name,
version=version,
depends=requires,
extras=extras,
)
return name
@pytest.mark.parametrize(
"pkg_builder",
[
pytest.param(
_local_with_setup, marks=pytest.mark.xfail(strict=True),
),
_direct_wheel,
_wheel_from_index,
],
)
def test_new_resolver_extra_merge_in_package(
self, monkeypatch, script, pkg_builder,
):
create_basic_wheel_for_package(script, "depdev", "1.0.0")
create_basic_wheel_for_package(
script,
"dep",
"1.0.0",
extras={"dev": ["depdev"]},
)
requirement = pkg_builder(
script,
name="pkg",
version="1.0.0",
requires=["dep"],
extras={"dev": ["dep[dev]"]},
)
script.pip(
"install", "--unstable-feature=resolver",
"--no-cache-dir", "--no-index",
"--find-links", script.scratch_path,
requirement + "[dev]",
)
assert_installed(script, pkg="1.0.0", dep="1.0.0", depdev="1.0.0")

View File

@ -29,7 +29,7 @@ def generate_yaml_tests(directory):
"""
Generate yaml test cases from the yaml files in the given directory
"""
for yml_file in directory.glob("*/*.yml"):
for yml_file in directory.glob("*.yml"):
data = yaml.safe_load(yml_file.read_text())
assert "cases" in data, "A fixture needs cases to be used in testing"
@ -40,18 +40,23 @@ def generate_yaml_tests(directory):
base = data.get("base", {})
cases = data["cases"]
for i, case_template in enumerate(cases):
case = base.copy()
case.update(case_template)
for resolver in 'old', 'new':
for i, case_template in enumerate(cases):
case = base.copy()
case.update(case_template)
case[":name:"] = base_name
if len(cases) > 1:
case[":name:"] += "-" + str(i)
case[":name:"] = base_name
if len(cases) > 1:
case[":name:"] += "-" + str(i)
case[":name:"] += "*" + resolver
case[":resolver:"] = resolver
if case.pop("skip", False):
case = pytest.param(case, marks=pytest.mark.xfail)
skip = case.pop("skip", False)
assert skip in [False, True, 'old', 'new']
if skip is True or skip == resolver:
case = pytest.param(case, marks=pytest.mark.xfail)
yield case
yield case
def id_func(param):
@ -92,60 +97,44 @@ def convert_to_dict(string):
return retval
def handle_request(script, action, requirement, options):
assert isinstance(requirement, str), (
"Need install requirement to be a string only"
)
def handle_request(script, action, requirement, options, new_resolver=False):
if action == 'install':
args = ['install', "--no-index", "--find-links",
path_to_url(script.scratch_path)]
args = ['install']
if new_resolver:
args.append("--unstable-feature=resolver")
args.extend(["--no-index", "--find-links",
path_to_url(script.scratch_path)])
elif action == 'uninstall':
args = ['uninstall', '--yes']
else:
raise "Did not excpet action: {!r}".format(action)
args.append(requirement)
if isinstance(requirement, str):
args.append(requirement)
elif isinstance(requirement, list):
args.extend(requirement)
else:
raise "requirement neither str nor list {!r}".format(requirement)
args.extend(options)
args.append("--verbose")
result = script.pip(*args,
allow_stderr_error=True,
allow_stderr_warning=True)
allow_stderr_warning=True,
allow_error=True)
retval = {
"_result_object": result,
}
if result.returncode == 0:
# Check which packages got installed
retval["state"] = []
# Check which packages got installed
state = []
for path in os.listdir(script.site_packages_path):
if path.endswith(".dist-info"):
name, version = (
os.path.basename(path)[:-len(".dist-info")]
).rsplit("-", 1)
# TODO: information about extras.
state.append(" ".join((name, version)))
for path in os.listdir(script.site_packages_path):
if path.endswith(".dist-info"):
name, version = (
os.path.basename(path)[:-len(".dist-info")]
).rsplit("-", 1)
# TODO: information about extras.
retval["state"].append(" ".join((name, version)))
retval["state"].sort()
elif "conflicting" in result.stderr.lower():
retval["conflicting"] = []
message = result.stderr.rsplit("\n", 1)[-1]
# XXX: There might be a better way than parsing the message
for match in re.finditer(message, _conflict_finder_pat):
di = match.groupdict()
retval["conflicting"].append(
{
"required_by": "{} {}".format(di["name"], di["version"]),
"selector": di["selector"]
}
)
return retval
return {"result": result, "state": sorted(state)}
@pytest.mark.yaml
@ -184,7 +173,26 @@ def test_yaml_based(script, case):
# Perform the requested action
effect = handle_request(script, action,
request[action],
request.get('options', '').split())
request.get('options', '').split(),
case[':resolver:'] == 'new')
assert effect['state'] == (response['state'] or []), \
str(effect["_result_object"])
if 0:  # for easier analysis of the output
with open(DATA_DIR.parent / "yaml" /
case[':name:'].replace('*', '-'), 'w') as fo:
result = effect['result']
fo.write("=== RETURNCODE = %d\n" % result.returncode)
fo.write("=== STDERR ===:\n%s\n" % result.stderr)
if 'state' in response:
assert effect['state'] == (response['state'] or []), \
str(effect["result"])
error = False
if 'conflicting' in response:
error = True
if error:
if case[":resolver:"] == 'old':
assert effect["result"].returncode == 0, str(effect["result"])
elif case[":resolver:"] == 'new':
assert effect["result"].returncode == 1, str(effect["result"])

View File

@ -533,6 +533,10 @@ class PipTestEnvironment(TestFileEnvironment):
`allow_stderr_warning` since warnings are weaker than errors.
:param allow_stderr_warning: whether a logged warning (or
deprecation message) is allowed in stderr.
:param allow_error: if True (default is False) does not raise an
exception when the command exit value is non-zero. Implies
expect_error, but in contrast to expect_error will not assert
that the exit value is non-zero.
:param expect_error: if False (the default), asserts that the command
exits with 0. Otherwise, asserts that the command exits with a
non-zero exit code. Passing True also implies allow_stderr_error
@ -553,10 +557,14 @@ class PipTestEnvironment(TestFileEnvironment):
# Partial fix for ScriptTest.run using `shell=True` on Windows.
args = [str(a).replace('^', '^^').replace('&', '^&') for a in args]
# Remove `allow_stderr_error` and `allow_stderr_warning` before
# calling run() because PipTestEnvironment doesn't support them.
# Remove `allow_stderr_error`, `allow_stderr_warning` and
# `allow_error` before calling run() because PipTestEnvironment
# doesn't support them.
allow_stderr_error = kw.pop('allow_stderr_error', None)
allow_stderr_warning = kw.pop('allow_stderr_warning', None)
allow_error = kw.pop('allow_error', None)
if allow_error:
kw['expect_error'] = True
# Propagate default values.
expect_error = kw.get('expect_error')
@ -596,7 +604,7 @@ class PipTestEnvironment(TestFileEnvironment):
kw['expect_stderr'] = True
result = super(PipTestEnvironment, self).run(cwd=cwd, *args, **kw)
if expect_error:
if expect_error and not allow_error:
if result.returncode == 0:
__tracebackhide__ = True
raise AssertionError("Script passed unexpectedly.")
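A hedged usage sketch of the new flag (hypothetical package name; `script` is the PipTestEnvironment fixture used throughout these tests):
result = script.pip("install", "--no-index", "does-not-exist", allow_error=True)
# No assertion is raised on the failing command; the caller inspects the result.
assert result.returncode != 0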

View File

@ -55,6 +55,7 @@ def factory(finder, preparer):
force_reinstall=False,
ignore_installed=False,
ignore_requires_python=False,
upgrade_strategy="to-satisfy-only",
py_version_info=None,
)

View File

@ -6,7 +6,10 @@ from pip._vendor.resolvelib.structs import DirectedGraph
from pip._internal.req.constructors import install_req_from_line
from pip._internal.req.req_set import RequirementSet
from pip._internal.resolution.resolvelib.resolver import Resolver
from pip._internal.resolution.resolvelib.resolver import (
Resolver,
get_topological_weights,
)
@pytest.fixture()
@ -21,11 +24,26 @@ def resolver(preparer, finder):
ignore_installed="not-used",
ignore_requires_python="not-used",
force_reinstall="not-used",
upgrade_strategy="not-used",
upgrade_strategy="to-satisfy-only",
)
return resolver
def _make_graph(edges):
"""Build graph from edge declarations.
"""
graph = DirectedGraph()
for parent, child in edges:
parent = canonicalize_name(parent) if parent else None
child = canonicalize_name(child) if child else None
for v in (parent, child):
if v not in graph:
graph.add(v)
graph.connect(parent, child)
return graph
@pytest.mark.parametrize(
"edges, ordered_reqs",
[
@ -40,9 +58,9 @@ def resolver(preparer, finder):
(
[
(None, "toporequires"),
(None, "toporequire2"),
(None, "toporequire3"),
(None, "toporequire4"),
(None, "toporequires2"),
(None, "toporequires3"),
(None, "toporequires4"),
("toporequires2", "toporequires"),
("toporequires3", "toporequires"),
("toporequires4", "toporequires"),
@ -59,15 +77,7 @@ def resolver(preparer, finder):
],
)
def test_new_resolver_get_installation_order(resolver, edges, ordered_reqs):
# Build graph from edge declarations.
graph = DirectedGraph()
for parent, child in edges:
parent = canonicalize_name(parent) if parent else None
child = canonicalize_name(child) if child else None
for v in (parent, child):
if v not in graph:
graph.add(v)
graph.connect(parent, child)
graph = _make_graph(edges)
# Mapping values and criteria are not used in test, so we stub them out.
mapping = {vertex: None for vertex in graph if vertex is not None}
@ -80,3 +90,147 @@ def test_new_resolver_get_installation_order(resolver, edges, ordered_reqs):
ireqs = resolver.get_installation_order(reqset)
req_strs = [str(r.req) for r in ireqs]
assert req_strs == ordered_reqs
@pytest.mark.parametrize(
"name, edges, expected_weights",
[
(
# From https://github.com/pypa/pip/pull/8127#discussion_r414564664
"deep second edge",
[
(None, "one"),
(None, "two"),
("one", "five"),
("two", "three"),
("three", "four"),
("four", "five"),
],
{None: 0, "one": 1, "two": 1, "three": 2, "four": 3, "five": 4},
),
(
"linear",
[
(None, "one"),
("one", "two"),
("two", "three"),
("three", "four"),
("four", "five"),
],
{None: 0, "one": 1, "two": 2, "three": 3, "four": 4, "five": 5},
),
(
"linear AND root -> two",
[
(None, "one"),
("one", "two"),
("two", "three"),
("three", "four"),
("four", "five"),
(None, "two"),
],
{None: 0, "one": 1, "two": 2, "three": 3, "four": 4, "five": 5},
),
(
"linear AND root -> three",
[
(None, "one"),
("one", "two"),
("two", "three"),
("three", "four"),
("four", "five"),
(None, "three"),
],
{None: 0, "one": 1, "two": 2, "three": 3, "four": 4, "five": 5},
),
(
"linear AND root -> four",
[
(None, "one"),
("one", "two"),
("two", "three"),
("three", "four"),
("four", "five"),
(None, "four"),
],
{None: 0, "one": 1, "two": 2, "three": 3, "four": 4, "five": 5},
),
(
"linear AND root -> five",
[
(None, "one"),
("one", "two"),
("two", "three"),
("three", "four"),
("four", "five"),
(None, "five"),
],
{None: 0, "one": 1, "two": 2, "three": 3, "four": 4, "five": 5},
),
(
"linear AND one -> four",
[
(None, "one"),
("one", "two"),
("two", "three"),
("three", "four"),
("four", "five"),
("one", "four"),
],
{None: 0, "one": 1, "two": 2, "three": 3, "four": 4, "five": 5},
),
(
"linear AND two -> four",
[
(None, "one"),
("one", "two"),
("two", "three"),
("three", "four"),
("four", "five"),
("two", "four"),
],
{None: 0, "one": 1, "two": 2, "three": 3, "four": 4, "five": 5},
),
(
"linear AND four -> one (cycle)",
[
(None, "one"),
("one", "two"),
("two", "three"),
("three", "four"),
("four", "five"),
("four", "one"),
],
{None: 0, "one": 1, "two": 2, "three": 3, "four": 4, "five": 5},
),
(
"linear AND four -> two (cycle)",
[
(None, "one"),
("one", "two"),
("two", "three"),
("three", "four"),
("four", "five"),
("four", "two"),
],
{None: 0, "one": 1, "two": 2, "three": 3, "four": 4, "five": 5},
),
(
"linear AND four -> three (cycle)",
[
(None, "one"),
("one", "two"),
("two", "three"),
("three", "four"),
("four", "five"),
("four", "three"),
],
{None: 0, "one": 1, "two": 2, "three": 3, "four": 4, "five": 5},
),
],
)
def test_new_resolver_topological_weights(name, edges, expected_weights):
graph = _make_graph(edges)
weights = get_topological_weights(graph)
assert weights == expected_weights

View File

@ -243,15 +243,15 @@ class TestInstallUnpackedWheel(object):
target_mode = os.stat(path).st_mode & 0o777
assert (target_mode & mode) == mode, oct(target_mode)
def assert_installed(self):
def assert_installed(self, expected_permission):
# lib
assert os.path.isdir(
os.path.join(self.scheme.purelib, 'sample'))
# dist-info
metadata = os.path.join(self.dest_dist_info, 'METADATA')
self.assert_permission(metadata, 0o644)
self.assert_permission(metadata, expected_permission)
record = os.path.join(self.dest_dist_info, 'RECORD')
self.assert_permission(record, 0o644)
self.assert_permission(record, expected_permission)
# data files
data_file = os.path.join(self.scheme.data, 'my_data', 'data_file')
assert os.path.isfile(data_file)
@ -268,7 +268,28 @@ class TestInstallUnpackedWheel(object):
scheme=self.scheme,
req_description=str(self.req),
)
self.assert_installed()
self.assert_installed(0o644)
@pytest.mark.parametrize("user_mask, expected_permission", [
(0o27, 0o640)
])
def test_std_install_with_custom_umask(self, data, tmpdir,
user_mask, expected_permission):
"""Test that the files created after install honor the permissions
set when the user sets a custom umask"""
prev_umask = os.umask(user_mask)
try:
self.prep(data, tmpdir)
wheel.install_wheel(
self.name,
self.wheelpath,
scheme=self.scheme,
req_description=str(self.req),
)
self.assert_installed(expected_permission)
finally:
os.umask(prev_umask)
def test_std_install_with_direct_url(self, data, tmpdir):
"""Test that install_wheel creates direct_url.json metadata when
@ -340,7 +361,7 @@ class TestInstallUnpackedWheel(object):
req_description=str(self.req),
_temp_dir_for_testing=self.src,
)
self.assert_installed()
self.assert_installed(0o644)
assert not os.path.isdir(
os.path.join(self.dest_dist_info, 'empty_dir'))

View File

@ -1,5 +1,31 @@
# Fixtures
This directory contains fixtures for testing pip's resolver. The fixtures are written as yml files, with a convenient format that allows for specifying a custom index for temporary use.
This directory contains fixtures for testing pip's resolver.
The fixtures are written as `.yml` files, with a convenient format
that allows for specifying a custom index for temporary use.
The `.yml` files are organized in the following way. A `base` section
which ...
The linter is very useful for initially checking `.yml` files, e.g.:
$ python linter.py -v simple.yml
To run only the yaml tests, use (from the root of the source tree):
$ tox -e py38 -- -m yaml -vv
Or, in order to avoid collecting all the test cases:
$ tox -e py38 -- tests/functional/test_yaml.py
Or, only a specific test:
$ tox -e py38 -- tests/functional/test_yaml.py -k simple
Or, just a specific test case:
$ tox -e py38 -- tests/functional/test_yaml.py -k simple-0
<!-- TODO: Add a good description of the format and how it can be used. -->

40
tests/yaml/backtrack.yml Normal file
View File

@ -0,0 +1,40 @@
# Pradyun's backtracking example
base:
available:
- A 1.0.0; depends B == 1.0.0
- A 2.0.0; depends B == 2.0.0, C == 1.0.0
- A 3.0.0; depends B == 3.0.0, C == 2.0.0
- A 4.0.0; depends B == 4.0.0, C == 3.0.0
- A 5.0.0; depends B == 5.0.0, C == 4.0.0
- A 6.0.0; depends B == 6.0.0, C == 5.0.0
- A 7.0.0; depends B == 7.0.0, C == 6.0.0
- A 8.0.0; depends B == 8.0.0, C == 7.0.0
- B 1.0.0; depends C == 1.0.0
- B 2.0.0; depends C == 2.0.0
- B 3.0.0; depends C == 3.0.0
- B 4.0.0; depends C == 4.0.0
- B 5.0.0; depends C == 5.0.0
- B 6.0.0; depends C == 6.0.0
- B 7.0.0; depends C == 7.0.0
- B 8.0.0; depends C == 8.0.0
- C 1.0.0
- C 2.0.0
- C 3.0.0
- C 4.0.0
- C 5.0.0
- C 6.0.0
- C 7.0.0
- C 8.0.0
cases:
-
request:
- install: A
response:
- state:
- A 1.0.0
- B 1.0.0
- C 1.0.0
skip: old

View File

@ -16,6 +16,7 @@ cases:
- B 1.0.0
- C 1.0.0
- D 1.0.0
skip: new
-
request:
- install: B
@ -25,6 +26,7 @@ cases:
- B 1.0.0
- C 1.0.0
- D 1.0.0
skip: new
-
request:
- install: C
@ -34,6 +36,7 @@ cases:
- B 1.0.0
- C 1.0.0
- D 1.0.0
skip: new
-
request:
- install: D
@ -43,3 +46,4 @@ cases:
- B 1.0.0
- C 1.0.0
- D 1.0.0
skip: new

View File

@ -39,4 +39,4 @@ cases:
- D 1.0.0
- E 1.0.0
- F 1.0.0
skip: true
skip: old

92
tests/yaml/linter.py Normal file
View File

@ -0,0 +1,92 @@
import sys
from pprint import pprint
import yaml
sys.path.insert(0, '../../src')
sys.path.insert(0, '../..')
def check_dict(d, required=None, optional=None):
assert isinstance(d, dict)
if required is None:
required = []
if optional is None:
optional = []
for key in required:
if key not in d:
sys.exit("key %r is required" % key)
allowed_keys = set(required)
allowed_keys.update(optional)
for key in d.keys():
if key not in allowed_keys:
sys.exit("key %r is not allowed. Allowed keys are: %r" %
(key, allowed_keys))
def lint_case(case, verbose=False):
from tests.functional.test_yaml import convert_to_dict
if verbose:
print("--- linting case ---")
pprint(case)
check_dict(case, optional=['available', 'request', 'response', 'skip'])
available = case.get("available", [])
requests = case.get("request", [])
responses = case.get("response", [])
assert isinstance(available, list)
assert isinstance(requests, list)
assert isinstance(responses, list)
assert len(requests) == len(responses)
for package in available:
if isinstance(package, str):
package = convert_to_dict(package)
if verbose:
pprint(package)
check_dict(package,
required=['name', 'version'],
optional=['depends', 'extras'])
for request, response in zip(requests, responses):
check_dict(request, optional=['install', 'uninstall', 'options'])
check_dict(response, optional=['state', 'conflicting'])
assert len(response) == 1
assert isinstance(response.get('state') or [], list)
def lint_yml(yml_file, verbose=False):
if verbose:
print("=== linting: %s ===" % yml_file)
assert yml_file.endswith(".yml")
with open(yml_file) as fi:
data = yaml.safe_load(fi)
if verbose:
pprint(data)
check_dict(data, required=['cases'], optional=['base'])
base = data.get("base", {})
cases = data["cases"]
for i, case_template in enumerate(cases):
case = base.copy()
case.update(case_template)
lint_case(case, verbose)
if __name__ == '__main__':
from optparse import OptionParser
p = OptionParser(usage="usage: %prog [options] FILE ...",
description="linter for pip's yaml test FILE(s)")
p.add_option('-v', '--verbose',
action="store_true")
opts, args = p.parse_args()
if len(args) < 1:
p.error('at least one argument required, try -h')
for yml_file in args:
lint_yml(yml_file, opts.verbose)

44
tests/yaml/overlap1.yml Normal file
View File

@ -0,0 +1,44 @@
# https://medium.com/knerd/the-nine-circles-of-python-dependency-hell-481d53e3e025
# Circle 4: Overlapping transitive dependencies
base:
available:
- myapp 0.2.4; depends fussy, capridous
- name: fussy
version: 3.8.0
depends: ['requests >=1.2.0,<3']
- name: capridous
version: 1.1.0
depends: ['requests >=1.0.3,<2']
- requests 1.0.1
- requests 1.0.3
- requests 1.1.0
- requests 1.2.0
- requests 1.3.0
- requests 2.1.0
- requests 3.2.0
cases:
-
request:
- install: myapp
response:
- state:
- capridous 1.1.0
- fussy 3.8.0
- myapp 0.2.4
- requests 1.3.0
skip: old
-
request:
- install: fussy
response:
- state:
- fussy 3.8.0
- requests 2.1.0
-
request:
- install: capridous
response:
- state:
- capridous 1.1.0
- requests 1.3.0

37
tests/yaml/pip988.yml Normal file
View File

@ -0,0 +1,37 @@
# https://github.com/pypa/pip/issues/988#issuecomment-606967707
base:
available:
- A 1.0.0; depends B >= 1.0.0, C >= 1.0.0
- A 2.0.0; depends B >= 2.0.0, C >= 1.0.0
- B 1.0.0; depends C >= 1.0.0
- B 2.0.0; depends C >= 2.0.0
- C 1.0.0
- C 2.0.0
cases:
-
request:
- install: C==1.0.0
- install: B==1.0.0
- install: A==1.0.0
- install: A==2.0.0
response:
- state:
- C 1.0.0
- state:
- B 1.0.0
- C 1.0.0
- state:
- A 1.0.0
- B 1.0.0
- C 1.0.0
- state:
- A 2.0.0
- B 2.0.0
- C 2.0.0
# for the last install (A==2.0.0) the old resolver gives
# - A 2.0.0
# - B 2.0.0
# - C 1.0.0
# but because B 2.0.0 depends on C >=2.0.0 this is wrong
skip: old

24
tests/yaml/poetry2298.yml Normal file
View File

@ -0,0 +1,24 @@
# see: https://github.com/python-poetry/poetry/issues/2298
base:
available:
- poetry 1.0.5; depends zappa == 0.51.0, sphinx == 3.0.1
- zappa 0.51.0; depends boto3
- sphinx 3.0.1; depends docutils
- boto3 1.4.5; depends botocore ~=1.5.0
- botocore 1.5.92; depends docutils <0.16
- docutils 0.16.0
- docutils 0.15.0
cases:
-
request:
- install: poetry
response:
- state:
- boto3 1.4.5
- botocore 1.5.92
- docutils 0.15.0
- poetry 1.0.5
- sphinx 3.0.1
- zappa 0.51.0
skip: old

View File

@ -38,3 +38,10 @@ cases:
response:
- state:
- base 0.1.0
-
request:
- install: ['dep', 'simple==0.1.0']
response:
- state:
- dep 0.1.0
- simple 0.1.0

24
tests/yaml/trivial.yml Normal file
View File

@ -0,0 +1,24 @@
base:
available:
- a 0.1.0
- b 0.2.0
- c 0.3.0
cases:
-
request:
- install: ['a', 'b']
- install: c
- uninstall: ['b', 'c']
- uninstall: a
response:
- state:
- a 0.1.0
- b 0.2.0
- state:
- a 0.1.0
- b 0.2.0
- c 0.3.0
- state:
- a 0.1.0
- state: null