fix test failures

This commit is contained in:
Danny McClanahan 2023-09-03 09:05:08 -04:00
parent ef9bcac9b5
commit 1dd5f4fa5e
No known key found for this signature in database
GPG Key ID: CE8D0DA71DEFC1DF
3 changed files with 29 additions and 50 deletions

View File

@ -12,6 +12,7 @@ from pathlib import Path
from typing import (
    TYPE_CHECKING,
    Any,
    Callable,
    Dict,
    FrozenSet,
    Iterable,
@ -618,6 +619,19 @@ class CandidateEvaluator:
) )
_FindCandidates = Callable[["PackageFinder", str], List[InstallationCandidate]]
def _canonicalize_arg(func: _FindCandidates) -> _FindCandidates:
    """Decorator normalizing the ``project_name`` argument before delegating.

    Applied above ``functools.lru_cache`` so that differently-spelled
    names (e.g. ``Foo_Bar`` vs ``foo-bar``) share a single cache entry.
    """

    @functools.wraps(func)
    def _wrapped(
        self: "PackageFinder", project_name: str
    ) -> List[InstallationCandidate]:
        # Canonicalize first, so the cached callee only ever sees one spelling.
        normalized = canonicalize_name(project_name)
        return func(self, normalized)

    return _wrapped
class PackageFinder: class PackageFinder:
"""This finds packages. """This finds packages.
@ -1110,6 +1124,7 @@ class PackageFinder:
return package_links return package_links
@_canonicalize_arg
@functools.lru_cache(maxsize=None) @functools.lru_cache(maxsize=None)
def find_all_candidates(self, project_name: str) -> List[InstallationCandidate]: def find_all_candidates(self, project_name: str) -> List[InstallationCandidate]:
"""Find all available InstallationCandidate for project_name """Find all available InstallationCandidate for project_name

View File

@ -2,7 +2,6 @@ import itertools
import json import json
import logging import logging
import os import os
import re
import uuid import uuid
from pathlib import Path from pathlib import Path
from textwrap import dedent from textwrap import dedent
@ -661,55 +660,6 @@ def test_parse_links__metadata_file_data(
assert link._hashes == hashes assert link._hashes == hashes
def test_parse_links_caches_same_page_by_url() -> None:
    """parse_links() must cache results keyed by page url, and must skip the
    cache for pages constructed with ``cache_link_parsing=False``.

    Fixes: the test began with ``raise Exception("todo!")``, which made every
    assertion below unreachable; and the ``cache_link_parsing=False`` argument
    for page_3 was commented out, contradicting the intent stated in the
    comment (page_3 "should not be cached").
    """
    html = (
        "<!DOCTYPE html>"
        '<html><head><meta charset="utf-8"><head>'
        '<body><a href="/pkg1-1.0.tar.gz"></a></body></html>'
    )
    html_bytes = html.encode("utf-8")
    url = "https://example.com/simple/"
    page_1 = IndexContent(
        html_bytes,
        "text/html",
        encoding=None,
        url=url,
    )
    # Make a second page with zero content, to ensure that it's not accessed,
    # because the page was cached by url.
    page_2 = IndexContent(
        b"",
        "text/html",
        encoding=None,
        url=url,
    )
    # Make a third page which represents an index url, which should not be
    # cached, even for the same url. We modify the page content slightly to
    # verify that the result is not cached.
    page_3 = IndexContent(
        re.sub(b"pkg1", b"pkg2", html_bytes),
        "text/html",
        encoding=None,
        url=url,
        cache_link_parsing=False,
    )

    parsed_links_1 = list(parse_links(page_1))
    assert len(parsed_links_1) == 1
    assert "pkg1" in parsed_links_1[0].url

    # Same url -> cached result reused; page_2's empty body is never parsed.
    parsed_links_2 = list(parse_links(page_2))
    assert parsed_links_2 == parsed_links_1

    # cache_link_parsing=False -> fresh parse despite the identical url.
    parsed_links_3 = list(parse_links(page_3))
    assert len(parsed_links_3) == 1
    assert parsed_links_3 != parsed_links_1
    assert "pkg2" in parsed_links_3[0].url
@mock.patch("pip._internal.index.collector.raise_for_status") @mock.patch("pip._internal.index.collector.raise_for_status")
def test_request_http_error( def test_request_http_error(
mock_raise_for_status: mock.Mock, caplog: pytest.LogCaptureFixture mock_raise_for_status: mock.Mock, caplog: pytest.LogCaptureFixture

View File

@ -314,6 +314,7 @@ def test_finder_priority_file_over_page(data: TestData) -> None:
find_links=[data.find_links], find_links=[data.find_links],
index_urls=["http://pypi.org/simple/"], index_urls=["http://pypi.org/simple/"],
) )
assert req.name
all_versions = finder.find_all_candidates(req.name) all_versions = finder.find_all_candidates(req.name)
# 1 file InstallationCandidate followed by all https ones # 1 file InstallationCandidate followed by all https ones
assert all_versions[0].link.scheme == "file" assert all_versions[0].link.scheme == "file"
@ -332,6 +333,7 @@ def test_finder_priority_nonegg_over_eggfragments() -> None:
links = ["http://foo/bar.py#egg=bar-1.0", "http://foo/bar-1.0.tar.gz"] links = ["http://foo/bar.py#egg=bar-1.0", "http://foo/bar-1.0.tar.gz"]
finder = make_test_finder(links) finder = make_test_finder(links)
assert req.name
all_versions = finder.find_all_candidates(req.name) all_versions = finder.find_all_candidates(req.name)
assert all_versions[0].link.url.endswith("tar.gz") assert all_versions[0].link.url.endswith("tar.gz")
assert all_versions[1].link.url.endswith("#egg=bar-1.0") assert all_versions[1].link.url.endswith("#egg=bar-1.0")
@ -344,6 +346,7 @@ def test_finder_priority_nonegg_over_eggfragments() -> None:
links.reverse() links.reverse()
finder = make_test_finder(links) finder = make_test_finder(links)
assert req.name
all_versions = finder.find_all_candidates(req.name) all_versions = finder.find_all_candidates(req.name)
assert all_versions[0].link.url.endswith("tar.gz") assert all_versions[0].link.url.endswith("tar.gz")
assert all_versions[1].link.url.endswith("#egg=bar-1.0") assert all_versions[1].link.url.endswith("#egg=bar-1.0")
@ -546,6 +549,17 @@ def test_find_all_candidates_nothing() -> None:
assert not finder.find_all_candidates("pip") assert not finder.find_all_candidates("pip")
def test_find_all_candidates_cached(data: TestData) -> None:
    """Repeated lookups for one project must return the identical cached list,
    regardless of how the project name is spelled."""
    finder = make_test_finder(find_links=[data.find_links])
    first_result = finder.find_all_candidates("simple")
    # A second call with the same name hands back the very same list object.
    assert finder.find_all_candidates("simple") is first_result
    # Canonicalization runs before the cache, so spelling variants hit it too.
    assert finder.find_all_candidates("Simple") is first_result
def test_find_all_candidates_find_links(data: TestData) -> None: def test_find_all_candidates_find_links(data: TestData) -> None:
finder = make_test_finder(find_links=[data.find_links]) finder = make_test_finder(find_links=[data.find_links])
versions = finder.find_all_candidates("simple") versions = finder.find_all_candidates("simple")