Compare commits

...

12 Commits

Author SHA1 Message Date
Danny McClanahan 787d890d25
Merge 227d8e8dd2 into 2a0acb595c 2023-11-09 21:29:49 +00:00
Damian Shaw 2a0acb595c
Update and provide fixes for mypy pre-commit (#12389)
* Update mypy to 1.6.1

* Fix mypy "Source file found twice under different module names" error

* Ignore type of initialized abstract class in tests

* Use more specific type ignore method-assign

* Type ignore for message.get_all

* Remove unused type ignore

* Add SizedBuffer type for xmlrpc.client.Transport subclass

* Add Self type for RequestHandlerClass in test

* Add type ignore for shutil.rmtree onexc handler

* Quote SizedBuffer

* Add news entry

* Remove no longer correct comment

* Update self import

* Also ignore type onerror=handler

* Update news entry

* Update news entry
2023-11-07 09:39:01 +00:00
Damian Shaw 68529081c2
Enforce f-strings via Ruff (#12393) 2023-11-07 09:14:56 +00:00
Damian Shaw 9685f64fe8
Update ruff and config (#12390) 2023-11-06 09:30:05 +00:00
Danny McClanahan 227d8e8dd2
handle metadata email parsing errors 2023-09-14 16:39:03 -04:00
Danny McClanahan 5a34ca3a48
use scandir over listdir when searching normal wheel cache 2023-09-14 14:38:12 -04:00
Danny McClanahan ed11f1ecef
turn debug logs in fetching from cache into exceptions 2023-09-14 14:15:39 -04:00
Danny McClanahan f4ed8f4cb9
add news 2023-09-14 14:15:39 -04:00
Danny McClanahan 69147e01ac
fix test failures 2023-09-14 14:15:38 -04:00
Danny McClanahan db218a1a7a
make LinkMetadataCache
- catch an exception when parsing metadata which only occurs in CI
- handle --no-cache-dir
- call os.makedirs() before writing to cache too
- catch InvalidSchema when attempting git urls with BatchDownloader
- fix other test failures
- reuse should_cache(req) logic
- gzip compress link metadata for a slight reduction in disk space
- only cache built sdists
- don't check should_cache() when fetching
- cache lazy wheel dists

tmp
2023-09-14 14:14:46 -04:00
Jonathan Helmus eb096b126e
use .metadata distribution info when possible
When performing `install --dry-run` and PEP 658 .metadata files are
available to guide the resolve, do not download the associated wheels.

Rather use the distribution information directly from the .metadata
files when reporting the results on the CLI and in the --report file.

- describe the new --dry-run behavior
- finalize linked requirements immediately after resolve
- introduce is_concrete
- funnel InstalledDistribution through _get_prepared_distribution() too
2023-09-14 13:24:43 -04:00
Danny McClanahan 7419f08fe6
add test for new install --dry-run functionality (no downloading) 2023-09-14 13:24:40 -04:00
100 changed files with 1167 additions and 608 deletions
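
Before the file-by-file diff, a hedged sketch (not part of the diff) of the behavior the commits above describe; the report file name and package are arbitrary examples:

import json
import subprocess

# With this branch, --dry-run resolves via PEP 658 .metadata files or lazy
# wheels where available, without downloading the actual dists.
subprocess.run(
    ["pip", "install", "--dry-run", "--report", "report.json", "requests"],
    check=True,
)

# The JSON report still carries full metadata for everything that would be
# installed, taken from the metadata-only ("virtual") dists.
with open("report.json") as f:
    report = json.load(f)
for item in report["install"]:
    print(item["metadata"]["name"], item["metadata"]["version"])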

View File

@@ -22,25 +22,26 @@ repos:
- id: black
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.0.292
rev: v0.1.4
hooks:
- id: ruff
args: [--fix, --exit-non-zero-on-fix]
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v0.961
rev: v1.6.1
hooks:
- id: mypy
exclude: tests/data
args: ["--pretty", "--show-error-codes"]
additional_dependencies: [
'keyring==23.0.1',
'nox==2021.6.12',
'keyring==24.2.0',
'nox==2023.4.22',
'pytest',
'types-docutils==0.18.3',
'types-setuptools==57.4.14',
'types-freezegun==1.1.9',
'types-six==1.16.15',
'types-pyyaml==6.0.12.2',
'types-docutils==0.20.0.3',
'types-setuptools==68.2.0.0',
'types-freezegun==1.1.10',
'types-six==1.16.21.9',
'types-pyyaml==6.0.12.12',
]
- repo: https://github.com/pre-commit/pygrep-hooks

View File

@@ -194,22 +194,17 @@ class PipReqFileOptionsReference(PipOptions):
opt = option()
opt_name = opt._long_opts[0]
if opt._short_opts:
short_opt_name = "{}, ".format(opt._short_opts[0])
short_opt_name = f"{opt._short_opts[0]}, "
else:
short_opt_name = ""
if option in cmdoptions.general_group["options"]:
prefix = ""
else:
prefix = "{}_".format(self.determine_opt_prefix(opt_name))
prefix = f"{self.determine_opt_prefix(opt_name)}_"
self.view_list.append(
"* :ref:`{short}{long}<{prefix}{opt_name}>`".format(
short=short_opt_name,
long=opt_name,
prefix=prefix,
opt_name=opt_name,
),
f"* :ref:`{short_opt_name}{opt_name}<{prefix}{opt_name}>`",
"\n",
)

1
news/12186.bugfix.rst Normal file
View File

@@ -0,0 +1 @@
Avoid downloading any dists in ``install --dry-run`` if PEP 658 ``.metadata`` files or lazy wheels are available.

1
news/12256.feature.rst Normal file
View File

@@ -0,0 +1 @@
Cache computed metadata from sdists and lazy wheels in ``~/.cache/pip/link-metadata``.

1
news/12389.bugfix.rst Normal file
View File

@@ -0,0 +1 @@
Update mypy to 1.6.1 and fix/ignore types

1
news/12390.trivial.rst Normal file
View File

@@ -0,0 +1 @@
Update ruff versions and config for dev

1
news/12393.trivial.rst Normal file
View File

@@ -0,0 +1 @@
Enforce and update code to use f-strings via Ruff rule UP032

View File

@@ -84,8 +84,8 @@ ignore = [
"B020",
"B904", # Ruff enables opinionated warnings by default
"B905", # Ruff enables opinionated warnings by default
"G202",
]
target-version = "py37"
line-length = 88
select = [
"ASYNC",
@@ -102,6 +102,7 @@ select = [
"PLR0",
"W",
"RUF100",
"UP032",
]
[tool.ruff.isort]

View File

@@ -77,7 +77,7 @@ setup(
entry_points={
"console_scripts": [
"pip=pip._internal.cli.main:main",
"pip{}=pip._internal.cli.main:main".format(sys.version_info[0]),
f"pip{sys.version_info[0]}=pip._internal.cli.main:main",
"pip{}.{}=pip._internal.cli.main:main".format(*sys.version_info[:2]),
],
},

View File

@@ -1,12 +1,14 @@
"""Cache Management
"""
import abc
import hashlib
import json
import logging
import os
import re
from pathlib import Path
from typing import Any, Dict, List, Optional
from typing import Dict, Iterator, List, Optional, Tuple
from pip._vendor.packaging.tags import Tag, interpreter_name, interpreter_version
from pip._vendor.packaging.utils import canonicalize_name
@@ -15,21 +17,71 @@ from pip._internal.exceptions import InvalidWheelFilename
from pip._internal.models.direct_url import DirectUrl
from pip._internal.models.link import Link
from pip._internal.models.wheel import Wheel
from pip._internal.req.req_install import InstallRequirement
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
from pip._internal.utils.urls import path_to_url
from pip._internal.vcs import vcs
logger = logging.getLogger(__name__)
_egg_info_re = re.compile(r"([a-z0-9_.]+)-([a-z0-9_.!+-]+)", re.IGNORECASE)
ORIGIN_JSON_NAME = "origin.json"
def _contains_egg_info(s: str) -> bool:
"""Determine whether the string looks like an egg_info.
:param s: The string to parse. E.g. foo-2.1
"""
return bool(_egg_info_re.search(s))
def should_cache(
req: InstallRequirement,
) -> bool:
"""
Return whether a built InstallRequirement can be stored in the persistent
wheel cache, assuming the wheel cache is available, and _should_build()
has determined a wheel needs to be built.
"""
if not req.link:
return False
if req.link.is_wheel:
return False
if req.editable or not req.source_dir:
# never cache editable requirements
return False
if req.link and req.link.is_vcs:
# VCS checkout. Do not cache
# unless it points to an immutable commit hash.
assert not req.editable
assert req.source_dir
vcs_backend = vcs.get_backend_for_scheme(req.link.scheme)
assert vcs_backend
if vcs_backend.is_immutable_rev_checkout(req.link.url, req.source_dir):
return True
return False
assert req.link
base, ext = req.link.splitext()
if _contains_egg_info(base):
return True
# Otherwise, do not cache.
return False
def _hash_dict(d: Dict[str, str]) -> str:
"""Return a stable sha224 of a dictionary."""
s = json.dumps(d, sort_keys=True, separators=(",", ":"), ensure_ascii=True)
return hashlib.sha224(s.encode("ascii")).hexdigest()
class Cache:
class Cache(abc.ABC):
"""An abstract class - provides cache directories for data from links
:param cache_dir: The root of the cache.
@@ -73,20 +125,28 @@ class Cache:
return parts
def _get_candidates(self, link: Link, canonical_package_name: str) -> List[Any]:
can_not_cache = not self.cache_dir or not canonical_package_name or not link
if can_not_cache:
return []
path = self.get_path_for_link(link)
if os.path.isdir(path):
return [(candidate, path) for candidate in os.listdir(path)]
return []
@abc.abstractmethod
def get_path_for_link(self, link: Link) -> str:
"""Return a directory to store cached items in for link."""
raise NotImplementedError()
...
def cache_path(self, link: Link) -> Path:
return Path(self.get_path_for_link(link))
class LinkMetadataCache(Cache):
"""Persistently store the metadata of dists found at each link."""
def get_path_for_link(self, link: Link) -> str:
parts = self._get_cache_path_parts(link)
assert self.cache_dir
return os.path.join(self.cache_dir, "link-metadata", *parts)
class WheelCacheBase(Cache):
"""Specializations to the cache concept for wheels."""
@abc.abstractmethod
def get(
self,
link: Link,
@@ -96,10 +156,27 @@ class Cache:
"""Returns a link to a cached item if it exists, otherwise returns the
passed link.
"""
raise NotImplementedError()
...
def _can_cache(self, link: Link, canonical_package_name: str) -> bool:
return bool(self.cache_dir and canonical_package_name and link)
def _get_candidates(
self, link: Link, canonical_package_name: str
) -> Iterator[Tuple[str, str]]:
if not self._can_cache(link, canonical_package_name):
return
path = self.get_path_for_link(link)
if not os.path.isdir(path):
return
for candidate in os.scandir(path):
if candidate.is_file():
yield (candidate.name, path)
class SimpleWheelCache(Cache):
class SimpleWheelCache(WheelCacheBase):
"""A cache of wheels for future installs."""
def __init__(self, cache_dir: str) -> None:
@@ -131,7 +208,7 @@ class SimpleWheelCache(Cache):
package_name: Optional[str],
supported_tags: List[Tag],
) -> Link:
candidates = []
candidates: List[Tuple[int, str, str]] = []
if not package_name:
return link
@@ -205,7 +282,7 @@ class CacheEntry:
)
class WheelCache(Cache):
class WheelCache(WheelCacheBase):
"""Wraps EphemWheelCache and SimpleWheelCache into a single Cache
This Cache allows for graceful degradation, using the ephem wheel cache
@@ -223,6 +300,15 @@ class WheelCache(Cache):
def get_ephem_path_for_link(self, link: Link) -> str:
return self._ephem_cache.get_path_for_link(link)
def resolve_cache_dir(self, req: InstallRequirement) -> str:
"""Return the persistent or temporary cache directory where the built or
downloaded wheel should be stored."""
cache_available = bool(self.cache_dir)
assert req.link, req
if cache_available and should_cache(req):
return self.get_path_for_link(req.link)
return self.get_ephem_path_for_link(req.link)
def get(
self,
link: Link,
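
As a usage sketch of the cache layout above (pip internals, not a stable API; the cache root below is hypothetical), LinkMetadataCache keys each entry by a stable sha224 hash of the link, split into nested directories:

from pip._internal.cache import LinkMetadataCache, _hash_dict
from pip._internal.models.link import Link

cache = LinkMetadataCache("/home/user/.cache/pip")  # hypothetical root
link = Link("https://example.com/packages/simple-1.0.tar.gz")

# _get_cache_path_parts() hashes a dict of the link's key fields with
# _hash_dict() and splits the digest into four nested path components:
print(cache.cache_path(link))
# e.g. /home/user/.cache/pip/link-metadata/ab/cd/ef/0123...

print(_hash_dict({"url": link.url_without_fragment}))  # stable sha224 hex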

View File

@@ -582,10 +582,7 @@ def _handle_python_version(
"""
version_info, error_msg = _convert_python_version(value)
if error_msg is not None:
msg = "invalid --python-version value: {!r}: {}".format(
value,
error_msg,
)
msg = f"invalid --python-version value: {value!r}: {error_msg}"
raise_option_error(parser, option=option, msg=msg)
parser.values.python_version = version_info
@@ -921,9 +918,9 @@ def _handle_merge_hash(
algo, digest = value.split(":", 1)
except ValueError:
parser.error(
"Arguments to {} must be a hash name "
f"Arguments to {opt_str} must be a hash name "
"followed by a value, like --hash=sha256:"
"abcde...".format(opt_str)
"abcde..."
)
if algo not in STRONG_HASHES:
parser.error(

View File

@@ -229,9 +229,9 @@ class ConfigOptionParser(CustomOptionParser):
val = strtobool(val)
except ValueError:
self.error(
"{} is not a valid value for {} option, "
f"{val} is not a valid value for {key} option, "
"please specify a boolean value like yes/no, "
"true/false or 1/0 instead.".format(val, key)
"true/false or 1/0 instead."
)
elif option.action == "count":
with suppress(ValueError):
@@ -240,10 +240,10 @@ class ConfigOptionParser(CustomOptionParser):
val = int(val)
if not isinstance(val, int) or val < 0:
self.error(
"{} is not a valid value for {} option, "
f"{val} is not a valid value for {key} option, "
"please instead specify either a non-negative integer "
"or a boolean value like yes/no or false/true "
"which is equivalent to 1/0.".format(val, key)
"which is equivalent to 1/0."
)
elif option.action == "append":
val = val.split()

View File

@@ -12,7 +12,7 @@ from functools import partial
from optparse import Values
from typing import TYPE_CHECKING, Any, List, Optional, Tuple
from pip._internal.cache import WheelCache
from pip._internal.cache import LinkMetadataCache, WheelCache
from pip._internal.cli import cmdoptions
from pip._internal.cli.base_command import Command
from pip._internal.cli.command_context import CommandContextMixIn
@@ -305,6 +305,10 @@ class RequirementCommand(IndexGroupCommand):
"fast-deps has no effect when used with the legacy resolver."
)
if options.cache_dir:
metadata_cache = LinkMetadataCache(options.cache_dir)
else:
metadata_cache = None
return RequirementPreparer(
build_dir=temp_build_dir_path,
src_dir=options.src_dir,
@@ -320,6 +324,7 @@
lazy_wheel=lazy_wheel,
verbosity=verbosity,
legacy_resolver=legacy_resolver,
metadata_cache=metadata_cache,
)
@classmethod

View File

@@ -175,7 +175,7 @@ class CacheCommand(Command):
files += self._find_http_files(options)
else:
# Add the pattern to the log message
no_matching_msg += ' for pattern "{}"'.format(args[0])
no_matching_msg += f' for pattern "{args[0]}"'
if not files:
logger.warning(no_matching_msg)

View File

@@ -242,17 +242,15 @@ class ConfigurationCommand(Command):
e.filename = editor
raise
except subprocess.CalledProcessError as e:
raise PipError(
"Editor Subprocess exited with exit code {}".format(e.returncode)
)
raise PipError(f"Editor Subprocess exited with exit code {e.returncode}")
def _get_n_args(self, args: List[str], example: str, n: int) -> Any:
"""Helper to make sure the command got the right number of arguments"""
if len(args) != n:
msg = (
"Got unexpected number of arguments, expected {}. "
'(example: "{} config {}")'
).format(n, get_prog(), example)
f"Got unexpected number of arguments, expected {n}. "
f'(example: "{get_prog()} config {example}")'
)
raise PipError(msg)
if n == 1:

View File

@@ -95,7 +95,7 @@ def show_actual_vendor_versions(vendor_txt_versions: Dict[str, str]) -> None:
elif parse_version(actual_version) != parse_version(expected_version):
extra_message = (
" (CONFLICT: vendor.txt suggests version should"
" be {})".format(expected_version)
f" be {expected_version})"
)
logger.info("%s==%s%s", module_name, actual_version, extra_message)
@@ -120,7 +120,7 @@ def show_tags(options: Values) -> None:
if formatted_target:
suffix = f" (target: {formatted_target})"
msg = "Compatible tags: {}{}".format(len(tags), suffix)
msg = f"Compatible tags: {len(tags)}{suffix}"
logger.info(msg)
if options.verbose < 1 and len(tags) > tag_limit:
@@ -134,9 +134,7 @@ def show_tags(options: Values) -> None:
logger.info(str(tag))
if tags_limited:
msg = (
"...\n[First {tag_limit} tags shown. Pass --verbose to show all.]"
).format(tag_limit=tag_limit)
msg = f"...\n[First {tag_limit} tags shown. Pass --verbose to show all.]"
logger.info(msg)

View File

@@ -130,6 +130,9 @@ class DownloadCommand(RequirementCommand):
self.trace_basic_info(finder)
requirement_set = resolver.resolve(reqs, check_supported_wheels=True)
preparer.finalize_linked_requirements(
requirement_set.requirements.values(), hydrate_virtual_reqs=True
)
downloaded: List[str] = []
for req in requirement_set.requirements.values():
@@ -138,7 +141,6 @@
preparer.save_linked_requirement(req)
downloaded.append(req.name)
preparer.prepare_linked_requirements_more(requirement_set.requirements.values())
requirement_set.warn_legacy_versions_and_specifiers()
if downloaded:

View File

@@ -128,12 +128,12 @@ class IndexCommand(IndexGroupCommand):
if not versions:
raise DistributionNotFound(
"No matching distribution found for {}".format(query)
f"No matching distribution found for {query}"
)
formatted_versions = [str(ver) for ver in sorted(versions, reverse=True)]
latest = formatted_versions[0]
write_output("{} ({})".format(query, latest))
write_output(f"{query} ({latest})")
write_output("Available versions: {}".format(", ".join(formatted_versions)))
print_dist_installation_info(query, latest)

View File

@@ -84,7 +84,8 @@ class InstallCommand(RequirementCommand):
help=(
"Don't actually install anything, just print what would be. "
"Can be used in combination with --ignore-installed "
"to 'resolve' the requirements."
"to 'resolve' the requirements. If PEP 658 or fast-deps metadata is "
"available, --dry-run also avoids downloading the dependency at all."
),
)
self.cmd_opts.add_option(
@@ -377,6 +378,10 @@ class InstallCommand(RequirementCommand):
requirement_set = resolver.resolve(
reqs, check_supported_wheels=not options.target_dir
)
preparer.finalize_linked_requirements(
requirement_set.requirements.values(),
hydrate_virtual_reqs=not options.dry_run,
)
if options.json_report_file:
report = InstallationReport(requirement_set.requirements_to_install)
@@ -607,12 +612,8 @@ class InstallCommand(RequirementCommand):
version = package_set[project_name][0]
for dependency in missing[project_name]:
message = (
"{name} {version} requires {requirement}, "
f"{project_name} {version} requires {dependency[1]}, "
"which is not installed."
).format(
name=project_name,
version=version,
requirement=dependency[1],
)
parts.append(message)

View File

@@ -145,6 +145,9 @@ class WheelCommand(RequirementCommand):
self.trace_basic_info(finder)
requirement_set = resolver.resolve(reqs, check_supported_wheels=True)
preparer.finalize_linked_requirements(
requirement_set.requirements.values(), hydrate_virtual_reqs=True
)
reqs_to_build: List[InstallRequirement] = []
for req in requirement_set.requirements.values():
@@ -153,7 +156,6 @@
elif should_build_for_wheel_command(req):
reqs_to_build.append(req)
preparer.prepare_linked_requirements_more(requirement_set.requirements.values())
requirement_set.warn_legacy_versions_and_specifiers()
# build wheels

View File

@@ -59,8 +59,8 @@ def _disassemble_key(name: str) -> List[str]:
if "." not in name:
error_message = (
"Key does not contain dot separated section and key. "
"Perhaps you wanted to use 'global.{}' instead?"
).format(name)
f"Perhaps you wanted to use 'global.{name}' instead?"
)
raise ConfigurationError(error_message)
return name.split(".", 1)

View File

@@ -1,4 +1,5 @@
from pip._internal.distributions.base import AbstractDistribution
from pip._internal.distributions.installed import InstalledDistribution
from pip._internal.distributions.sdist import SourceDistribution
from pip._internal.distributions.wheel import WheelDistribution
from pip._internal.req.req_install import InstallRequirement
@@ -8,6 +9,10 @@ def make_distribution_for_install_requirement(
install_req: InstallRequirement,
) -> AbstractDistribution:
"""Returns a Distribution for the given InstallRequirement"""
# Only pre-installed requirements will have a .satisfied_by dist.
if install_req.satisfied_by:
return InstalledDistribution(install_req)
# Editable requirements will always be source distributions. They use the
# legacy logic until we create a modern standard for them.
if install_req.editable:
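
For orientation, a condensed sketch of the full dispatch; the wheel and fallback branches come from the unchanged remainder of the function, which this hunk truncates:

from pip._internal.distributions.base import AbstractDistribution
from pip._internal.distributions.installed import InstalledDistribution
from pip._internal.distributions.sdist import SourceDistribution
from pip._internal.distributions.wheel import WheelDistribution
from pip._internal.req.req_install import InstallRequirement

def make_distribution_for_install_requirement(
    install_req: InstallRequirement,
) -> AbstractDistribution:
    if install_req.satisfied_by:  # already installed (new in this branch)
        return InstalledDistribution(install_req)
    if install_req.editable:  # editables always build from source
        return SourceDistribution(install_req)
    if install_req.is_wheel:  # wheels need no build step
        return WheelDistribution(install_req)
    return SourceDistribution(install_req)  # otherwise treat as an sdist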

View File

@@ -35,11 +35,17 @@ class AbstractDistribution(metaclass=abc.ABCMeta):
If None, then this dist has no work to do in the build tracker, and
``.prepare_distribution_metadata()`` will not be called."""
raise NotImplementedError()
...
@abc.abstractmethod
def get_metadata_distribution(self) -> BaseDistribution:
raise NotImplementedError()
"""Generate a concrete ``BaseDistribution`` instance for this artifact.
The implementation should also cache the result with
``self.req.cache_concrete_dist()`` so the distribution is available to other
users of the ``InstallRequirement``. This method is not called within the build
tracker context, so it should not identify any new setup requirements."""
...
@abc.abstractmethod
def prepare_distribution_metadata(
@@ -48,4 +54,11 @@
build_isolation: bool,
check_build_deps: bool,
) -> None:
raise NotImplementedError()
"""Generate the information necessary to extract metadata from the artifact.
This method will be executed within the context of ``BuildTracker#track()``, so
it needs to fully identify any setup requirements so they can be added to the
same active set of tracked builds, while ``.get_metadata_distribution()`` takes
care of generating and caching the ``BaseDistribution`` to expose to the rest of
the resolve."""
...
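
A minimal subclass sketch of the contract documented above (illustrative only; the class and its method bodies are hypothetical):

from typing import Optional

from pip._internal.distributions.base import AbstractDistribution
from pip._internal.index.package_finder import PackageFinder
from pip._internal.metadata import BaseDistribution

class ExampleDistribution(AbstractDistribution):
    """Hypothetical artifact type, showing the documented call order."""

    @property
    def build_tracker_id(self) -> Optional[str]:
        # Non-None: prepare_distribution_metadata() will run inside
        # BuildTracker#track(), so setup requirements get tracked.
        assert self.req.link
        return self.req.link.url_without_fragment

    def prepare_distribution_metadata(
        self, finder: PackageFinder, build_isolation: bool, check_build_deps: bool
    ) -> None:
        # Identify/build everything needed to extract metadata here,
        # while still inside the build tracker context.
        ...

    def get_metadata_distribution(self) -> BaseDistribution:
        dist = ...  # construct a BaseDistribution for the artifact
        # Cache it so other users of the InstallRequirement see the same dist.
        self.req.cache_concrete_dist(dist)
        return dist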

View File

@@ -17,8 +17,10 @@ class InstalledDistribution(AbstractDistribution):
return None
def get_metadata_distribution(self) -> BaseDistribution:
assert self.req.satisfied_by is not None, "not actually installed"
return self.req.satisfied_by
dist = self.req.satisfied_by
assert dist is not None, "not actually installed"
self.req.cache_concrete_dist(dist)
return dist
def prepare_distribution_metadata(
self,

View File

@@ -1,11 +1,11 @@
import logging
from typing import Iterable, Optional, Set, Tuple
from typing import Iterable, Set, Tuple
from pip._internal.build_env import BuildEnvironment
from pip._internal.distributions.base import AbstractDistribution
from pip._internal.exceptions import InstallationError
from pip._internal.index.package_finder import PackageFinder
from pip._internal.metadata import BaseDistribution
from pip._internal.metadata import BaseDistribution, get_directory_distribution
from pip._internal.utils.subprocess import runner_with_spinner_message
logger = logging.getLogger(__name__)
@@ -19,13 +19,19 @@ class SourceDistribution(AbstractDistribution):
"""
@property
def build_tracker_id(self) -> Optional[str]:
def build_tracker_id(self) -> str:
"""Identify this requirement uniquely by its link."""
assert self.req.link
return self.req.link.url_without_fragment
def get_metadata_distribution(self) -> BaseDistribution:
return self.req.get_dist()
assert (
self.req.metadata_directory
), "Set as part of .prepare_distribution_metadata()"
dist = get_directory_distribution(self.req.metadata_directory)
self.req.cache_concrete_dist(dist)
self.req.validate_sdist_metadata()
return dist
def prepare_distribution_metadata(
self,
@@ -64,7 +70,11 @@
self._raise_conflicts("the backend dependencies", conflicting)
if missing:
self._raise_missing_reqs(missing)
self.req.prepare_metadata()
# NB: we must still call .cache_concrete_dist() and .validate_sdist_metadata()
# before the InstallRequirement itself has been updated with the metadata from
# this directory!
self.req.prepare_metadata_directory()
def _prepare_build_backend(self, finder: PackageFinder) -> None:
# Isolate in a BuildEnvironment and install the build-time

View File

@@ -29,7 +29,9 @@ class WheelDistribution(AbstractDistribution):
assert self.req.local_file_path, "Set as part of preparation during download"
assert self.req.name, "Wheels are never unnamed"
wheel = FilesystemWheel(self.req.local_file_path)
return get_wheel_distribution(wheel, canonicalize_name(self.req.name))
dist = get_wheel_distribution(wheel, canonicalize_name(self.req.name))
self.req.cache_concrete_dist(dist)
return dist
def prepare_distribution_metadata(
self,

View File

@@ -247,10 +247,26 @@ class NoneMetadataError(PipError):
def __str__(self) -> str:
# Use `dist` in the error message because its stringification
# includes more information, like the version and location.
return "None {} metadata found for distribution: {}".format(
self.metadata_name,
self.dist,
)
return f"None {self.metadata_name} metadata found for distribution: {self.dist}"
class CacheMetadataError(PipError):
"""Raised when de/serializing a requirement into the metadata cache."""
def __init__(
self,
req: "InstallRequirement",
reason: str,
) -> None:
"""
:param req: The requirement we attempted to cache.
:param reason: Context about the precise error that occurred.
"""
self.req = req
self.reason = reason
def __str__(self) -> str:
return f"{self.reason} for {self.req} from {self.req.link}"
class UserInstallationInvalid(InstallationError):
@@ -594,7 +610,7 @@ class HashMismatch(HashError):
self.gots = gots
def body(self) -> str:
return " {}:\n{}".format(self._requirement_name(), self._hash_comparison())
return f" {self._requirement_name()}:\n{self._hash_comparison()}"
def _hash_comparison(self) -> str:
"""
@@ -616,11 +632,9 @@ class HashMismatch(HashError):
lines: List[str] = []
for hash_name, expecteds in self.allowed.items():
prefix = hash_then_or(hash_name)
lines.extend(
(" Expected {} {}".format(next(prefix), e)) for e in expecteds
)
lines.extend((f" Expected {next(prefix)} {e}") for e in expecteds)
lines.append(
" Got {}\n".format(self.gots[hash_name].hexdigest())
f" Got {self.gots[hash_name].hexdigest()}\n"
)
return "\n".join(lines)

View File

@@ -533,8 +533,8 @@ class CandidateEvaluator:
)
except ValueError:
raise UnsupportedWheel(
"{} is not a supported wheel for this platform. It "
"can't be sorted.".format(wheel.filename)
f"{wheel.filename} is not a supported wheel for this platform. It "
"can't be sorted."
)
if self._prefer_binary:
binary_preference = 1
@@ -939,9 +939,7 @@ class PackageFinder:
_format_versions(best_candidate_result.iter_all()),
)
raise DistributionNotFound(
"No matching distribution found for {}".format(req)
)
raise DistributionNotFound(f"No matching distribution found for {req}")
def _should_install_candidate(
candidate: Optional[InstallationCandidate],

View File

@@ -56,8 +56,7 @@ def distutils_scheme(
try:
d.parse_config_files()
except UnicodeDecodeError:
# Typeshed does not include find_config_files() for some reason.
paths = d.find_config_files() # type: ignore
paths = d.find_config_files()
logger.warning(
"Ignore distutils configs in %s due to encoding errors.",
", ".join(os.path.basename(p) for p in paths),

View File

@@ -6,7 +6,14 @@ from typing import TYPE_CHECKING, List, Optional, Type, cast
from pip._internal.utils.misc import strtobool
from .base import BaseDistribution, BaseEnvironment, FilesystemWheel, MemoryWheel, Wheel
from .base import (
BaseDistribution,
BaseEnvironment,
FilesystemWheel,
MemoryWheel,
Wheel,
serialize_metadata,
)
if TYPE_CHECKING:
from typing import Literal, Protocol
@@ -23,6 +30,7 @@ __all__ = [
"get_environment",
"get_wheel_distribution",
"select_backend",
"serialize_metadata",
]

View File

@@ -64,10 +64,10 @@ def msg_to_json(msg: Message) -> Dict[str, Any]:
key = json_name(field)
if multi:
value: Union[str, List[str]] = [
sanitise_header(v) for v in msg.get_all(field)
sanitise_header(v) for v in msg.get_all(field) # type: ignore
]
else:
value = sanitise_header(msg.get(field))
value = sanitise_header(msg.get(field)) # type: ignore
if key == "keywords":
# Accept both comma-separated and space-separated
# forms, for better compatibility with old data.

View File

@@ -1,6 +1,9 @@
import csv
import email.generator
import email.message
import email.policy
import functools
import io
import json
import logging
import pathlib
@@ -97,6 +100,18 @@ def _convert_installed_files_path(
return str(pathlib.Path(*info, *entry))
def serialize_metadata(msg: email.message.Message) -> str:
"""Write a dist's metadata to a string.
Calling ``str(dist.metadata)`` may raise an error by misinterpreting RST directives
as email headers. This method uses the more robust ``email.policy.EmailPolicy`` to
avoid those parsing errors."""
out = io.StringIO()
g = email.generator.Generator(out, policy=email.policy.EmailPolicy())
g.flatten(msg)
return out.getvalue()
class RequiresEntry(NamedTuple):
requirement: str
extra: str
@@ -104,6 +119,15 @@
class BaseDistribution(Protocol):
@property
def is_concrete(self) -> bool:
"""Whether the distribution really exists somewhere on disk.
If this is false, it has been synthesized from metadata, e.g. via
``.from_metadata_file_contents()``, or ``.from_wheel()`` against
a ``MemoryWheel``."""
raise NotImplementedError()
@classmethod
def from_directory(cls, directory: str) -> "BaseDistribution":
"""Load the distribution from a metadata directory.
@@ -681,6 +705,10 @@ class BaseEnvironment:
class Wheel(Protocol):
location: str
@property
def is_concrete(self) -> bool:
raise NotImplementedError()
def as_zipfile(self) -> zipfile.ZipFile:
raise NotImplementedError()
@@ -689,6 +717,10 @@ class FilesystemWheel(Wheel):
def __init__(self, location: str) -> None:
self.location = location
@property
def is_concrete(self) -> bool:
return True
def as_zipfile(self) -> zipfile.ZipFile:
return zipfile.ZipFile(self.location, allowZip64=True)
@@ -698,5 +730,9 @@ class MemoryWheel(Wheel):
self.location = location
self.stream = stream
@property
def is_concrete(self) -> bool:
return False
def as_zipfile(self) -> zipfile.ZipFile:
return zipfile.ZipFile(self.stream, allowZip64=True)
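
A self-contained illustration of `serialize_metadata` above, using a synthetic message; the diff's docstring notes that a plain `str(msg)` can trip over RST in the body:

import email.generator
import email.message
import email.policy
import io

msg = email.message.Message()
msg["Metadata-Version"] = "2.1"
msg["Name"] = "example"
msg["Version"] = "1.0"
# Long descriptions may contain RST that resembles email header syntax.
msg.set_payload("Example\n=======\n\n.. code-block:: python\n\n    print('hi')\n")

out = io.StringIO()
g = email.generator.Generator(out, policy=email.policy.EmailPolicy())
g.flatten(msg)
print(out.getvalue())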

View File

@@ -98,16 +98,22 @@ class Distribution(BaseDistribution):
dist: importlib.metadata.Distribution,
info_location: Optional[BasePath],
installed_location: Optional[BasePath],
concrete: bool,
) -> None:
self._dist = dist
self._info_location = info_location
self._installed_location = installed_location
self._concrete = concrete
@property
def is_concrete(self) -> bool:
return self._concrete
@classmethod
def from_directory(cls, directory: str) -> BaseDistribution:
info_location = pathlib.Path(directory)
dist = importlib.metadata.Distribution.at(info_location)
return cls(dist, info_location, info_location.parent)
return cls(dist, info_location, info_location.parent, concrete=True)
@classmethod
def from_metadata_file_contents(
@@ -124,7 +130,7 @@
metadata_path.write_bytes(metadata_contents)
# Construct dist pointing to the newly created directory.
dist = importlib.metadata.Distribution.at(metadata_path.parent)
return cls(dist, metadata_path.parent, None)
return cls(dist, metadata_path.parent, None, concrete=False)
@classmethod
def from_wheel(cls, wheel: Wheel, name: str) -> BaseDistribution:
@@ -135,7 +141,12 @@
raise InvalidWheel(wheel.location, name) from e
except UnsupportedWheel as e:
raise UnsupportedWheel(f"{name} has an invalid wheel, {e}")
return cls(dist, dist.info_location, pathlib.PurePosixPath(wheel.location))
return cls(
dist,
dist.info_location,
pathlib.PurePosixPath(wheel.location),
concrete=wheel.is_concrete,
)
@property
def location(self) -> Optional[str]:
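
A sketch of the new `concrete` flag in use (assumed usage of pip internals; the `.dist-info` path is hypothetical):

from pip._internal.metadata.importlib._dists import Distribution

metadata = b"Metadata-Version: 2.1\nName: example\nVersion: 1.0\n"

# Synthesized from raw METADATA bytes: no real files on disk.
virtual = Distribution.from_metadata_file_contents(
    metadata, "example-1.0.tar.gz", "example"
)
assert not virtual.is_concrete

# Loaded from an actual .dist-info directory: physically present.
concrete = Distribution.from_directory("/site-packages/example-1.0.dist-info")
assert concrete.is_concrete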

View File

@@ -81,7 +81,7 @@ class _DistributionFinder:
installed_location: Optional[BasePath] = None
else:
installed_location = info_location.parent
yield Distribution(dist, info_location, installed_location)
yield Distribution(dist, info_location, installed_location, concrete=True)
def find_linked(self, location: str) -> Iterator[BaseDistribution]:
"""Read location in egg-link files and return distributions in there.
@@ -105,7 +105,7 @@
continue
target_location = str(path.joinpath(target_rel))
for dist, info_location in self._find_impl(target_location):
yield Distribution(dist, info_location, path)
yield Distribution(dist, info_location, path, concrete=True)
def _find_eggs_in_dir(self, location: str) -> Iterator[BaseDistribution]:
from pip._vendor.pkg_resources import find_distributions
@@ -117,7 +117,7 @@
if not entry.name.endswith(".egg"):
continue
for dist in find_distributions(entry.path):
yield legacy.Distribution(dist)
yield legacy.Distribution(dist, concrete=True)
def _find_eggs_in_zip(self, location: str) -> Iterator[BaseDistribution]:
from pip._vendor.pkg_resources import find_eggs_in_zip
@@ -129,7 +129,7 @@
except zipimport.ZipImportError:
return
for dist in find_eggs_in_zip(importer, location):
yield legacy.Distribution(dist)
yield legacy.Distribution(dist, concrete=True)
def find_eggs(self, location: str) -> Iterator[BaseDistribution]:
"""Find eggs in a location.

View File

@@ -73,8 +73,13 @@ class InMemoryMetadata:
class Distribution(BaseDistribution):
def __init__(self, dist: pkg_resources.Distribution) -> None:
def __init__(self, dist: pkg_resources.Distribution, concrete: bool) -> None:
self._dist = dist
self._concrete = concrete
@property
def is_concrete(self) -> bool:
return self._concrete
@classmethod
def from_directory(cls, directory: str) -> BaseDistribution:
@@ -94,7 +99,7 @@ class Distribution(BaseDistribution):
dist_name = os.path.splitext(dist_dir_name)[0].split("-")[0]
dist = dist_cls(base_dir, project_name=dist_name, metadata=metadata)
return cls(dist)
return cls(dist, concrete=True)
@classmethod
def from_metadata_file_contents(
@@ -111,7 +116,7 @@ class Distribution(BaseDistribution):
metadata=InMemoryMetadata(metadata_dict, filename),
project_name=project_name,
)
return cls(dist)
return cls(dist, concrete=False)
@classmethod
def from_wheel(cls, wheel: Wheel, name: str) -> BaseDistribution:
@@ -132,7 +137,7 @@
metadata=InMemoryMetadata(metadata_dict, wheel.location),
project_name=name,
)
return cls(dist)
return cls(dist, concrete=wheel.is_concrete)
@property
def location(self) -> Optional[str]:
@@ -241,7 +246,7 @@ class Environment(BaseEnvironment):
def _iter_distributions(self) -> Iterator[BaseDistribution]:
for dist in self._ws:
yield Distribution(dist)
yield Distribution(dist, concrete=True)
def _search_distribution(self, name: str) -> Optional[BaseDistribution]:
"""Find a distribution matching the ``name`` in the environment.

View File

@@ -27,8 +27,4 @@ class InstallationCandidate(KeyBasedCompareMixin):
)
def __str__(self) -> str:
return "{!r} candidate (version {} at {})".format(
self.name,
self.version,
self.link,
)
return f"{self.name!r} candidate (version {self.version} at {self.link})"

View File

@@ -31,9 +31,7 @@ def _get(
value = d[key]
if not isinstance(value, expected_type):
raise DirectUrlValidationError(
"{!r} has unexpected type for {} (expected {})".format(
value, key, expected_type
)
f"{value!r} has unexpected type for {key} (expected {expected_type})"
)
return value

View File

@@ -33,9 +33,7 @@ class FormatControl:
return all(getattr(self, k) == getattr(other, k) for k in self.__slots__)
def __repr__(self) -> str:
return "{}({}, {})".format(
self.__class__.__name__, self.no_binary, self.only_binary
)
return f"{self.__class__.__name__}({self.no_binary}, {self.only_binary})"
@staticmethod
def handle_mutual_excludes(value: str, target: Set[str], other: Set[str]) -> None:

View File

@@ -32,7 +32,7 @@ class InstallationReport:
"requested": ireq.user_supplied,
# PEP 566 json encoding for metadata
# https://www.python.org/dev/peps/pep-0566/#json-compatible-metadata
"metadata": ireq.get_dist().metadata_dict,
"metadata": ireq.cached_dist.metadata_dict,
}
if ireq.user_supplied and ireq.extras:
# For top level requirements, the list of requested extras, if any.

View File

@@ -368,9 +368,7 @@ class Link(KeyBasedCompareMixin):
else:
rp = ""
if self.comes_from:
return "{} (from {}){}".format(
redact_auth_from_url(self._url), self.comes_from, rp
)
return f"{redact_auth_from_url(self._url)} (from {self.comes_from}){rp}"
else:
return redact_auth_from_url(str(self._url))

View File

@@ -42,7 +42,7 @@ def _prepare_download(
logged_url = redact_auth_from_url(url)
if total_length:
logged_url = "{} ({})".format(logged_url, format_size(total_length))
logged_url = f"{logged_url} ({format_size(total_length)})"
if is_from_cache(resp):
logger.info("Using cached %s", logged_url)
@@ -113,7 +113,7 @@ def _get_http_response_filename(resp: Response, link: Link) -> str:
def _http_get_download(session: PipSession, link: Link) -> Response:
target_url = link.url.split("#", 1)[0]
target_url = link.url_without_fragment
resp = session.get(target_url, headers=HEADERS, stream=True)
raise_for_status(resp)
return resp

View File

@@ -13,6 +13,8 @@ from pip._internal.network.utils import raise_for_status
if TYPE_CHECKING:
from xmlrpc.client import _HostType, _Marshallable
from _typeshed import SizedBuffer
logger = logging.getLogger(__name__)
@@ -33,7 +35,7 @@ class PipXmlrpcTransport(xmlrpc.client.Transport):
self,
host: "_HostType",
handler: str,
request_body: bytes,
request_body: "SizedBuffer",
verbose: bool = False,
) -> Tuple["_Marshallable", ...]:
assert isinstance(host, str)

View File

@@ -9,7 +9,6 @@ from pip._vendor.packaging.specifiers import LegacySpecifier
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
from pip._vendor.packaging.version import LegacyVersion
from pip._internal.distributions import make_distribution_for_install_requirement
from pip._internal.metadata import get_default_environment
from pip._internal.metadata.base import DistributionVersion
from pip._internal.req.req_install import InstallRequirement
@@ -127,8 +126,8 @@ def _simulate_installation_of(
# Modify it as installing requirement_set would (assuming no errors)
for inst_req in to_install:
abstract_dist = make_distribution_for_install_requirement(inst_req)
dist = abstract_dist.get_metadata_distribution()
assert inst_req.is_concrete
dist = inst_req.cached_dist
name = dist.canonical_name
package_set[name] = PackageDetails(dist.version, list(dist.iter_dependencies()))

View File

@@ -164,16 +164,14 @@ def message_about_scripts_not_on_PATH(scripts: Sequence[str]) -> Optional[str]:
for parent_dir, dir_scripts in warn_for.items():
sorted_scripts: List[str] = sorted(dir_scripts)
if len(sorted_scripts) == 1:
start_text = "script {} is".format(sorted_scripts[0])
start_text = f"script {sorted_scripts[0]} is"
else:
start_text = "scripts {} are".format(
", ".join(sorted_scripts[:-1]) + " and " + sorted_scripts[-1]
)
msg_lines.append(
"The {} installed in '{}' which is not on PATH.".format(
start_text, parent_dir
)
f"The {start_text} installed in '{parent_dir}' which is not on PATH."
)
last_line_fmt = (
@@ -321,9 +319,7 @@ def get_console_script_specs(console: Dict[str, str]) -> List[str]:
scripts_to_generate.append("pip = " + pip_script)
if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
scripts_to_generate.append(
"pip{} = {}".format(sys.version_info[0], pip_script)
)
scripts_to_generate.append(f"pip{sys.version_info[0]} = {pip_script}")
scripts_to_generate.append(f"pip{get_major_minor_version()} = {pip_script}")
# Delete any other versioned pip entry points
@@ -336,9 +332,7 @@
scripts_to_generate.append("easy_install = " + easy_install_script)
scripts_to_generate.append(
"easy_install-{} = {}".format(
get_major_minor_version(), easy_install_script
)
f"easy_install-{get_major_minor_version()} = {easy_install_script}"
)
# Delete any other versioned easy_install entry points
easy_install_ep = [
@@ -408,10 +402,10 @@ class ScriptFile:
class MissingCallableSuffix(InstallationError):
def __init__(self, entry_point: str) -> None:
super().__init__(
"Invalid script entry point: {} - A callable "
f"Invalid script entry point: {entry_point} - A callable "
"suffix is required. Cf https://packaging.python.org/"
"specifications/entry-points/#use-for-scripts for more "
"information.".format(entry_point)
"information."
)
@@ -712,7 +706,7 @@ def req_error_context(req_description: str) -> Generator[None, None, None]:
try:
yield
except InstallationError as e:
message = "For req: {}. {}".format(req_description, e.args[0])
message = f"For req: {req_description}. {e.args[0]}"
raise InstallationError(message) from e

View File

@@ -4,17 +4,22 @@
# The following comment should be removed at some point in the future.
# mypy: strict-optional=False
import gzip
import json
import mimetypes
import os
import shutil
from dataclasses import dataclass
from pathlib import Path
from typing import Dict, Iterable, List, Optional
from typing import Dict, Iterable, List, Optional, Tuple
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.requests.exceptions import InvalidSchema
from pip._internal.cache import LinkMetadataCache, should_cache
from pip._internal.distributions import make_distribution_for_install_requirement
from pip._internal.distributions.installed import InstalledDistribution
from pip._internal.exceptions import (
CacheMetadataError,
DirectoryUrlHashUnsupported,
HashMismatch,
HashUnpinned,
@@ -24,7 +29,11 @@ from pip._internal.exceptions import (
VcsHashUnsupported,
)
from pip._internal.index.package_finder import PackageFinder
from pip._internal.metadata import BaseDistribution, get_metadata_distribution
from pip._internal.metadata import (
BaseDistribution,
get_metadata_distribution,
serialize_metadata,
)
from pip._internal.models.direct_url import ArchiveInfo
from pip._internal.models.link import Link
from pip._internal.models.wheel import Wheel
@@ -62,16 +71,17 @@ def _get_prepared_distribution(
finder: PackageFinder,
build_isolation: bool,
check_build_deps: bool,
) -> BaseDistribution:
) -> Tuple[bool, BaseDistribution]:
"""Prepare a distribution for installation."""
abstract_dist = make_distribution_for_install_requirement(req)
tracker_id = abstract_dist.build_tracker_id
if tracker_id is not None:
builds_metadata = tracker_id is not None
if builds_metadata:
with build_tracker.track(req, tracker_id):
abstract_dist.prepare_distribution_metadata(
finder, build_isolation, check_build_deps
)
return abstract_dist.get_metadata_distribution()
return (builds_metadata, abstract_dist.get_metadata_distribution())
def unpack_vcs_link(link: Link, location: str, verbosity: int) -> None:
@@ -188,6 +198,8 @@ def _check_download_dir(
) -> Optional[str]:
"""Check download_dir for previously downloaded file with correct hash
If a correct file is found return its path else None
If a file is found at the given path, but with an invalid hash, the file is deleted.
"""
download_path = os.path.join(download_dir, link.filename)
@@ -210,10 +222,49 @@
return download_path
@dataclass(frozen=True)
class CacheableDist:
metadata: str
filename: Path
canonical_name: str
@classmethod
def from_dist(cls, link: Link, dist: BaseDistribution) -> "CacheableDist":
"""Extract the serializable data necessary to generate a metadata-only dist."""
return cls(
metadata=serialize_metadata(dist.metadata),
filename=Path(link.filename),
canonical_name=dist.canonical_name,
)
def to_dist(self) -> BaseDistribution:
"""Return a metadata-only dist from the deserialized cache entry."""
return get_metadata_distribution(
metadata_contents=self.metadata.encode("utf-8"),
filename=str(self.filename),
canonical_name=self.canonical_name,
)
def to_json(self) -> Dict[str, str]:
return {
"metadata": self.metadata,
"filename": str(self.filename),
"canonical_name": self.canonical_name,
}
@classmethod
def from_json(cls, args: Dict[str, str]) -> "CacheableDist":
return cls(
metadata=args["metadata"],
filename=Path(args["filename"]),
canonical_name=args["canonical_name"],
)
class RequirementPreparer:
"""Prepares a Requirement"""
def __init__(
def __init__( # noqa: PLR0913
self,
build_dir: str,
download_dir: Optional[str],
@@ -229,6 +280,7 @@ class RequirementPreparer:
lazy_wheel: bool,
verbosity: int,
legacy_resolver: bool,
metadata_cache: Optional[LinkMetadataCache] = None,
) -> None:
super().__init__()
@@ -271,6 +323,8 @@ class RequirementPreparer:
# Previous "header" printed for a link-based InstallRequirement
self._previous_requirement_header = ("", "")
self._metadata_cache = metadata_cache
def _log_preparing_link(self, req: InstallRequirement) -> None:
"""Provide context for the requirement being prepared."""
if req.link.is_file and not req.is_wheel_from_cache:
@@ -363,14 +417,81 @@
)
return None
if self.require_hashes:
# Hash checking also means hashes are provided for all reqs, so no resolve
# is necessary and metadata-only fetching provides no speedup.
logger.debug(
"Metadata-only fetching is not used as hash checking is required",
)
return None
# Try PEP 658 metadata first, then fall back to lazy wheel if unavailable.
return self._fetch_metadata_using_link_data_attr(
req
) or self._fetch_metadata_using_lazy_wheel(req.link)
return (
self._fetch_cached_metadata(req)
or self._fetch_metadata_using_link_data_attr(req)
or self._fetch_metadata_using_lazy_wheel(req)
)
def _locate_metadata_cache_entry(self, link: Link) -> Optional[Path]:
"""If the metadata cache is active, generate a filesystem path from the hash of
the given Link."""
if self._metadata_cache is None:
return None
return self._metadata_cache.cache_path(link)
def _fetch_cached_metadata(
self, req: InstallRequirement
) -> Optional[BaseDistribution]:
cached_path = self._locate_metadata_cache_entry(req.link)
if cached_path is None:
return None
# Quietly continue if the cache entry does not exist.
if not os.path.isfile(cached_path):
logger.debug(
"no cached metadata for link %s at %s",
req.link,
cached_path,
)
return None
try:
with gzip.open(cached_path, mode="rt", encoding="utf-8") as f:
logger.debug(
"found cached metadata for link %s at %s", req.link, f.name
)
args = json.load(f)
cached_dist = CacheableDist.from_json(args)
return cached_dist.to_dist()
except Exception:
raise CacheMetadataError(req, "error reading cached metadata")
def _cache_metadata(
self,
req: InstallRequirement,
metadata_dist: BaseDistribution,
) -> None:
cached_path = self._locate_metadata_cache_entry(req.link)
if cached_path is None:
return
# The cache file exists already, so we have nothing to do.
if os.path.isfile(cached_path):
logger.debug(
"metadata for link %s is already cached at %s", req.link, cached_path
)
return
# The metadata cache is split across several subdirectories, so ensure the
# containing directory for the cache file exists before writing.
os.makedirs(str(cached_path.parent), exist_ok=True)
try:
cacheable_dist = CacheableDist.from_dist(req.link, metadata_dist)
args = cacheable_dist.to_json()
logger.debug("caching metadata for link %s at %s", req.link, cached_path)
with gzip.open(cached_path, mode="wt", encoding="utf-8") as f:
json.dump(args, f)
except Exception:
raise CacheMetadataError(req, "failed to serialize metadata")
def _fetch_metadata_using_link_data_attr(
self,
@@ -388,6 +509,9 @@ class RequirementPreparer:
metadata_link,
)
# (2) Download the contents of the METADATA file, separate from the dist itself.
# NB: this request will hit the CacheControl HTTP cache, which will be very
# quick since the METADATA file is very small. Therefore, we can rely on
# HTTP caching instead of LinkMetadataCache.
metadata_file = get_http_url(
metadata_link,
self._download,
@@ -415,33 +539,45 @@
def _fetch_metadata_using_lazy_wheel(
self,
link: Link,
req: InstallRequirement,
) -> Optional[BaseDistribution]:
"""Fetch metadata using lazy wheel, if possible."""
# --use-feature=fast-deps must be provided.
if not self.use_lazy_wheel:
return None
if link.is_file or not link.is_wheel:
if req.link.is_file or not req.link.is_wheel:
logger.debug(
"Lazy wheel is not used as %r does not point to a remote wheel",
link,
req.link,
)
return None
wheel = Wheel(link.filename)
wheel = Wheel(req.link.filename)
name = canonicalize_name(wheel.name)
logger.info(
"Obtaining dependency information from %s %s",
name,
wheel.version,
)
url = link.url.split("#", 1)[0]
try:
return dist_from_wheel_url(name, url, self._session)
lazy_wheel_dist = dist_from_wheel_url(
name, req.link.url_without_fragment, self._session
)
except HTTPRangeRequestUnsupported:
logger.debug("%s does not support range requests", url)
logger.debug("%s does not support range requests", req.link)
return None
# If we've used the lazy wheel approach, then PEP 658 metadata is not available.
# If the wheel is very large (>1GB), then retrieving it from the CacheControl
# HTTP cache may take multiple seconds, even on a fast computer, and the
# preparer will unnecessarily copy the cached response to disk before deleting
# it at the end of the run. Caching the dist metadata in LinkMetadataCache means
# later pip executions can retrieve metadata within milliseconds and avoid
# thrashing the disk.
self._cache_metadata(req, lazy_wheel_dist)
return lazy_wheel_dist
def _complete_partial_requirements(
self,
partially_downloaded_reqs: Iterable[InstallRequirement],
@@ -458,7 +594,21 @@
links_to_fully_download: Dict[Link, InstallRequirement] = {}
for req in partially_downloaded_reqs:
assert req.link
links_to_fully_download[req.link] = req
# (1) File URLs don't need to be downloaded, so skip them.
if req.link.scheme == "file":
continue
# (2) If this is e.g. a git url, we don't know how to handle that in the
# BatchDownloader, so leave it for self._prepare_linked_requirement() at
# the end of this method, which knows how to handle any URL.
can_simply_download = True
try:
# This will raise InvalidSchema if our Session can't download it.
self._session.get_adapter(req.link.url)
except InvalidSchema:
can_simply_download = False
if can_simply_download:
links_to_fully_download[req.link] = req
batch_download = self._batch_download(
links_to_fully_download.keys(),
@@ -518,41 +668,92 @@
# The file is not available, attempt to fetch only metadata
metadata_dist = self._fetch_metadata_only(req)
if metadata_dist is not None:
req.needs_more_preparation = True
# These reqs now have the dependency information from the downloaded
# metadata, without having downloaded the actual dist at all.
req.cache_virtual_metadata_only_dist(metadata_dist)
return metadata_dist
# None of the optimizations worked, fully prepare the requirement
return self._prepare_linked_requirement(req, parallel_builds)
def prepare_linked_requirements_more(
self, reqs: Iterable[InstallRequirement], parallel_builds: bool = False
) -> None:
"""Prepare linked requirements more, if needed."""
reqs = [req for req in reqs if req.needs_more_preparation]
def _ensure_download_info(self, reqs: Iterable[InstallRequirement]) -> None:
"""
`pip install --report` extracts the download info from each requirement for its
JSON output, so we need to make sure every requirement has this before finishing
the resolve. But by the point this method is called, .download_info will only
have been populated for requirements already found in the wheel cache, so we need to
synthesize it for uncached results. Luckily, a DirectUrl can be parsed directly
from a url without any other context. However, this also means the download info
will only contain a hash if the link itself declares the hash.
"""
for req in reqs:
self._populate_download_info(req)
def _force_fully_prepared(
self, reqs: Iterable[InstallRequirement], require_concrete: bool
) -> None:
"""
The legacy resolver seems to prepare requirements differently, in ways that can
leave them half-done in certain code paths. It's not quite clear how it does
things, but at least we can do this to make sure they end up fully prepared.
"""
for req in reqs:
req.prepared = True
if require_concrete:
assert req.is_concrete
def finalize_linked_requirements(
self,
reqs: Iterable[InstallRequirement],
hydrate_virtual_reqs: bool,
parallel_builds: bool = False,
) -> None:
"""Prepare linked requirements more, if needed.
Neighboring .metadata files as per PEP 658 or lazy wheels via fast-deps will be
preferred for extracting metadata from any concrete requirement (one that has been
mapped to a Link) without downloading the underlying wheel or sdist. When ``pip
install --dry-run`` is called, we want to avoid ever downloading the underlying
dist, but we still need to provide all of the results that pip commands expect
from the typical resolve process.
Those expectations vary, but one distinction lies in whether the command needs
an actual physical dist somewhere on the filesystem, or just the metadata about
it from the resolver (as in ``pip install --report``). If the command requires
actual physical filesystem locations for the resolved dists, it must call this
method with ``hydrate_virtual_reqs=True`` to fully download anything
that remains.
"""
if not hydrate_virtual_reqs:
self._ensure_download_info(reqs)
self._force_fully_prepared(reqs, require_concrete=False)
return
partially_downloaded_reqs: List[InstallRequirement] = []
for req in reqs:
if req.is_concrete:
continue
# Determine if any of these requirements were already downloaded.
if self.download_dir is not None and req.link.is_wheel:
hashes = self._get_linked_req_hashes(req)
file_path = _check_download_dir(req.link, self.download_dir, hashes)
# If the file is there, but doesn't match the hash, delete it and print
# a warning. We will be downloading it again via
# partially_downloaded_reqs.
file_path = _check_download_dir(
req.link, self.download_dir, hashes, warn_on_hash_mismatch=True
)
if file_path is not None:
# If the hash does match, then we still need to generate a concrete
# dist, but we don't have to download the wheel again.
self._downloaded[req.link.url] = file_path
req.needs_more_preparation = False
partially_downloaded_reqs.append(req)
# Prepare requirements we found were already downloaded for some
# reason. The other downloads will be completed separately.
partially_downloaded_reqs: List[InstallRequirement] = []
for req in reqs:
if req.needs_more_preparation:
partially_downloaded_reqs.append(req)
else:
self._prepare_linked_requirement(req, parallel_builds)
# TODO: separate this part out from RequirementPreparer when the v1
# resolver can be removed!
self._complete_partial_requirements(
partially_downloaded_reqs,
parallel_builds=parallel_builds,
)
# NB: Must call this method before returning!
self._force_fully_prepared(reqs, require_concrete=True)
def _prepare_linked_requirement(
self, req: InstallRequirement, parallel_builds: bool
@@ -603,8 +804,8 @@
)
except NetworkConnectionError as exc:
raise InstallationError(
"Could not install requirement {} because of HTTP "
"error {} for URL {}".format(req, exc, link)
f"Could not install requirement {req} because of HTTP "
f"error {exc} for URL {link}"
)
else:
file_path = self._downloaded[link.url]
@@ -612,12 +813,31 @@
hashes.check_against_path(file_path)
local_file = File(file_path, content_type=None)
# For use in later processing,
# preserve the file path on the requirement.
if local_file:
req.local_file_path = local_file.path
self._populate_download_info(req)
(builds_metadata, dist) = _get_prepared_distribution(
req,
self.build_tracker,
self.finder,
self.build_isolation,
self.check_build_deps,
)
if builds_metadata and should_cache(req):
self._cache_metadata(req, dist)
return dist
def _populate_download_info(self, req: InstallRequirement) -> None:
# If download_info is set, we got it from the wheel cache.
if req.download_info is None:
# Editables don't go through this function (see
# prepare_editable_requirement).
assert not req.editable
req.download_info = direct_url_from_link(link, req.source_dir)
req.download_info = direct_url_from_link(req.link, req.source_dir)
# Make sure we have a hash in download_info. If we got it as part of the
# URL, it will have been verified and we can rely on it. Otherwise we
# compute it from the downloaded file.
@@ -625,30 +845,17 @@
if (
isinstance(req.download_info.info, ArchiveInfo)
and not req.download_info.info.hashes
and local_file
and req.local_file_path
):
hash = hash_file(local_file.path)[0].hexdigest()
hash = hash_file(req.local_file_path)[0].hexdigest()
# We populate info.hash for backward compatibility.
# This will automatically populate info.hashes.
req.download_info.info.hash = f"sha256={hash}"
# For use in later processing,
# preserve the file path on the requirement.
if local_file:
req.local_file_path = local_file.path
dist = _get_prepared_distribution(
req,
self.build_tracker,
self.finder,
self.build_isolation,
self.check_build_deps,
)
return dist
def save_linked_requirement(self, req: InstallRequirement) -> None:
assert self.download_dir is not None
assert req.link is not None
assert req.is_concrete
link = req.link
if link.is_vcs or (link.is_existing_dir() and req.editable):
# Make a .zip of the source_dir we already created.
@@ -684,16 +891,16 @@
with indent_log():
if self.require_hashes:
raise InstallationError(
"The editable requirement {} cannot be installed when "
f"The editable requirement {req} cannot be installed when "
"requiring hashes, because there is no single file to "
"hash.".format(req)
"hash."
)
req.ensure_has_source_dir(self.src_dir)
req.update_editable()
assert req.source_dir
req.download_info = direct_url_for_editable(req.unpacked_source_directory)
dist = _get_prepared_distribution(
(_, dist) = _get_prepared_distribution(
req,
self.build_tracker,
self.finder,
@@ -703,6 +910,8 @@
req.check_if_exists(self.use_user_site)
# This should already have been populated by the preparation of the source dist.
assert req.is_concrete
return dist
def prepare_installed_requirement(
@@ -714,7 +923,7 @@
assert req.satisfied_by, "req should have been satisfied but isn't"
assert skip_reason is not None, (
"did not get skip reason skipped but req.satisfied_by "
"is set to {}".format(req.satisfied_by)
f"is set to {req.satisfied_by}"
)
logger.info(
"Requirement %s: %s (%s)", skip_reason, req, req.satisfied_by.version
@ -727,4 +936,13 @@ class RequirementPreparer:
"completely repeatable environment, install into an "
"empty virtualenv."
)
return InstalledDistribution(req).get_metadata_distribution()
(_, dist) = _get_prepared_distribution(
req,
self.build_tracker,
self.finder,
self.build_isolation,
self.check_build_deps,
)
assert req.is_concrete
return dist

View File

@ -462,7 +462,7 @@ def install_req_from_req_string(
raise InstallationError(
"Packages installed from PyPI cannot depend on packages "
"which are not also hosted on PyPI.\n"
"{} depends on {} ".format(comes_from.name, req)
f"{comes_from.name} depends on {req} "
)
return InstallRequirement(

View File

@ -23,10 +23,7 @@ from pip._internal.locations import get_scheme
from pip._internal.metadata import (
BaseDistribution,
get_default_environment,
get_directory_distribution,
get_wheel_distribution,
)
from pip._internal.metadata.base import FilesystemWheel
from pip._internal.models.direct_url import DirectUrl
from pip._internal.models.link import Link
from pip._internal.operations.build.metadata import generate_metadata
@ -88,7 +85,7 @@ class InstallRequirement:
permit_editable_wheels: bool = False,
) -> None:
assert req is None or isinstance(req, Requirement), req
self.req = req
self._req = req
self.comes_from = comes_from
self.constraint = constraint
self.editable = editable
@ -150,6 +147,7 @@ class InstallRequirement:
self.hash_options = hash_options if hash_options else {}
self.config_settings = config_settings
# Set to True after successful preparation of this requirement
# TODO: this is only used in the legacy resolver: remove this!
self.prepared = False
# User supplied requirements are explicitly requested for installation
# by the user via CLI arguments or requirements files, as opposed to,
@ -181,17 +179,34 @@ class InstallRequirement:
# but after loading this flag should be treated as read only.
self.use_pep517 = use_pep517
# This requirement needs more preparation before it can be built
self.needs_more_preparation = False
# When a dist is computed for this requirement, cache it here so it's visible
# everywhere within pip and isn't computed more than once. This may be
# a "virtual" dist without a physical location on the filesystem, or
# a "concrete" dist which has been fully downloaded.
self._cached_dist: Optional[BaseDistribution] = None
# Strictly used in testing: allow calling .cache_concrete_dist() twice.
self.allow_concrete_dist_overwrite = False
# This requirement needs to be unpacked before it can be installed.
self._archive_source: Optional[Path] = None
@property
def req(self) -> Optional[Requirement]:
"""Calculate a requirement from the cached dist if necessary."""
if self._req is not None:
return self._req
if self._cached_dist is not None:
name = self._cached_dist.canonical_name
version = str(self._cached_dist.version)
self._req = Requirement(f"{name}=={version}")
return self._req
return None
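With the lazy `req` property above, a requirement that began without a name (for example, a bare URL) acquires an exact `==` pin once a dist is cached. A toy illustration of that derivation, using the vendored `packaging` as pip does; the name and version values are invented:

from pip._vendor.packaging.requirements import Requirement

name, version = "simple", "1.0"  # stand-ins for canonical_name / str(version)
pin = Requirement(f"{name}=={version}")
assert str(pin) == "simple==1.0"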
def __str__(self) -> str:
if self.req:
s = redact_auth_from_requirement(self.req)
if self.link:
s += " from {}".format(redact_auth_from_url(self.link.url))
s += f" from {redact_auth_from_url(self.link.url)}"
elif self.link:
s = redact_auth_from_url(self.link.url)
else:
@ -221,7 +236,7 @@ class InstallRequirement:
attributes = vars(self)
names = sorted(attributes)
state = ("{}={!r}".format(attr, attributes[attr]) for attr in sorted(names))
state = (f"{attr}={attributes[attr]!r}" for attr in sorted(names))
return "<{name} object: {{{state}}}>".format(
name=self.__class__.__name__,
state=", ".join(state),
@ -234,7 +249,7 @@ class InstallRequirement:
return None
return self.req.name
@functools.lru_cache() # use cached_property in python 3.8+
@functools.lru_cache(maxsize=None) # TODO: use cached_property in python 3.8+
def supports_pyproject_editable(self) -> bool:
if not self.use_pep517:
return False
@ -380,7 +395,7 @@ class InstallRequirement:
def _set_requirement(self) -> None:
"""Set requirement after generating metadata."""
assert self.req is None
assert self._req is None
assert self.metadata is not None
assert self.source_dir is not None
@ -390,7 +405,7 @@ class InstallRequirement:
else:
op = "==="
self.req = Requirement(
self._req = Requirement(
"".join(
[
self.metadata["Name"],
@ -416,7 +431,7 @@ class InstallRequirement:
metadata_name,
self.name,
)
self.req = Requirement(metadata_name)
self._req = Requirement(metadata_name)
def check_if_exists(self, use_user_site: bool) -> None:
"""Find an installed distribution that satisfies or conflicts
@ -552,11 +567,11 @@ class InstallRequirement:
f"Consider using a build backend that supports PEP 660."
)
def prepare_metadata(self) -> None:
def prepare_metadata_directory(self) -> None:
"""Ensure that project metadata is available.
Under PEP 517 and PEP 660, call the backend hook to prepare the metadata.
Under legacy processing, call setup.py egg-info.
Under PEP 517 and PEP 660, call the backend hook to prepare the metadata
directory. Under legacy processing, call setup.py egg-info.
"""
assert self.source_dir, f"No source dir for {self}"
details = self.name or f"from {self.link}"
@ -588,6 +603,8 @@ class InstallRequirement:
details=details,
)
def validate_sdist_metadata(self) -> None:
"""Ensure that we have a dist, and ensure it corresponds to expectations."""
# Act on the newly generated metadata, based on the name and version.
if not self.name:
self._set_requirement()
@ -598,24 +615,59 @@ class InstallRequirement:
@property
def metadata(self) -> Any:
# TODO: use cached_property in python 3.8+
if not hasattr(self, "_metadata"):
self._metadata = self.get_dist().metadata
self._metadata = self.cached_dist.metadata
return self._metadata
def get_dist(self) -> BaseDistribution:
if self.metadata_directory:
return get_directory_distribution(self.metadata_directory)
elif self.local_file_path and self.is_wheel:
assert self.req is not None
return get_wheel_distribution(
FilesystemWheel(self.local_file_path),
canonicalize_name(self.req.name),
@property
def cached_dist(self) -> BaseDistribution:
"""Retrieve the dist resolved from this requirement.
:raises AssertionError: if the resolver has not yet been executed.
"""
if self._cached_dist is None:
raise AssertionError(
f"InstallRequirement {self} has no dist; "
"ensure the resolver has been executed"
)
raise AssertionError(
f"InstallRequirement {self} has no metadata directory and no wheel: "
f"can't make a distribution."
)
return self._cached_dist
def cache_virtual_metadata_only_dist(self, dist: BaseDistribution) -> None:
"""Associate a "virtual" metadata-only dist to this requirement.
This dist cannot be installed, but it can be used to complete the resolve
process.
:raises AssertionError: if a dist has already been associated.
:raises AssertionError: if the provided dist is "concrete", i.e. exists
somewhere on the filesystem.
"""
assert self._cached_dist is None, self
assert not dist.is_concrete, dist
self._cached_dist = dist
def cache_concrete_dist(self, dist: BaseDistribution) -> None:
"""Associate a "concrete" dist to this requirement.
A concrete dist exists somewhere on the filesystem and can be installed.
:raises AssertionError: if a concrete dist has already been associated.
:raises AssertionError: if the provided dist is not concrete.
"""
if self._cached_dist is not None:
# If we set a dist twice for the same requirement, we must be hydrating
# a concrete dist for what was previously virtual. This will occur in the
# case of `install --dry-run` when PEP 658 metadata is available.
if not self.allow_concrete_dist_overwrite:
assert not self._cached_dist.is_concrete
assert dist.is_concrete
self._cached_dist = dist
@property
def is_concrete(self) -> bool:
return self._cached_dist is not None and self._cached_dist.is_concrete
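Taken together, the two cache methods define a one-way lifecycle: a virtual (metadata-only) dist may be attached during the resolve, and the same requirement is later hydrated with a concrete dist. A compressed illustration with a stub object (not a pip class):

class _StubDist:
    def __init__(self, is_concrete: bool) -> None:
        self.is_concrete = is_concrete

# During the resolve, PEP 658 metadata can yield a virtual dist:
#     req.cache_virtual_metadata_only_dist(_StubDist(is_concrete=False))
# After download/build, the requirement is hydrated:
#     req.cache_concrete_dist(_StubDist(is_concrete=True))
# Caching a second virtual dist, or replacing a concrete one (outside of
# tests using allow_concrete_dist_overwrite), trips an assertion.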
def assert_source_matches_version(self) -> None:
assert self.source_dir, f"No source dir for {self}"
@ -754,8 +806,8 @@ class InstallRequirement:
if os.path.exists(archive_path):
response = ask_path_exists(
"The file {} exists. (i)gnore, (w)ipe, "
"(b)ackup, (a)bort ".format(display_path(archive_path)),
f"The file {display_path(archive_path)} exists. (i)gnore, (w)ipe, "
"(b)ackup, (a)bort ",
("i", "w", "b", "a"),
)
if response == "i":

View File

@ -46,7 +46,7 @@ class RequirementSet:
self.unnamed_requirements.append(install_req)
def add_named_requirement(self, install_req: InstallRequirement) -> None:
assert install_req.name
assert install_req.name, install_req
project_name = canonicalize_name(install_req.name)
self.requirements[project_name] = install_req
@ -86,7 +86,7 @@ class RequirementSet:
def warn_legacy_versions_and_specifiers(self) -> None:
for req in self.requirements_to_install:
version = req.get_dist().version
version = req.cached_dist.version
if isinstance(version, LegacyVersion):
deprecated(
reason=(
@ -101,7 +101,7 @@ class RequirementSet:
issue=12063,
gone_in="24.0",
)
for dep in req.get_dist().iter_dependencies():
for dep in req.cached_dist.iter_dependencies():
if any(isinstance(spec, LegacySpecifier) for spec in dep.specifier):
deprecated(
reason=(

View File

@ -71,16 +71,16 @@ def uninstallation_paths(dist: BaseDistribution) -> Generator[str, None, None]:
entries = dist.iter_declared_entries()
if entries is None:
msg = "Cannot uninstall {dist}, RECORD file not found.".format(dist=dist)
msg = f"Cannot uninstall {dist}, RECORD file not found."
installer = dist.installer
if not installer or installer == "pip":
dep = "{}=={}".format(dist.raw_name, dist.version)
dep = f"{dist.raw_name}=={dist.version}"
msg += (
" You might be able to recover from this via: "
"'pip install --force-reinstall --no-deps {}'.".format(dep)
f"'pip install --force-reinstall --no-deps {dep}'."
)
else:
msg += " Hint: The package was installed by {}.".format(installer)
msg += f" Hint: The package was installed by {installer}."
raise UninstallationError(msg)
for entry in entries:

View File

@ -231,9 +231,7 @@ class Resolver(BaseResolver):
tags = compatibility_tags.get_supported()
if requirement_set.check_supported_wheels and not wheel.supported(tags):
raise InstallationError(
"{} is not a supported wheel on this platform.".format(
wheel.filename
)
f"{wheel.filename} is not a supported wheel on this platform."
)
# This next bit is really a sanity check.
@ -287,9 +285,9 @@ class Resolver(BaseResolver):
)
if does_not_satisfy_constraint:
raise InstallationError(
"Could not satisfy constraints for '{}': "
f"Could not satisfy constraints for '{install_req.name}': "
"installation from path or url cannot be "
"constrained to a version".format(install_req.name)
"constrained to a version"
)
# If we're now installing a constraint, mark the existing
# object for real installation.
@ -398,9 +396,9 @@ class Resolver(BaseResolver):
# "UnicodeEncodeError: 'ascii' codec can't encode character"
# in Python 2 when the reason contains non-ascii characters.
"The candidate selected for download or install is a "
"yanked version: {candidate}\n"
"Reason for being yanked: {reason}"
).format(candidate=best_candidate, reason=reason)
f"yanked version: {best_candidate}\n"
f"Reason for being yanked: {reason}"
)
logger.warning(msg)
return link

View File

@ -159,10 +159,7 @@ class _InstallRequirementBackedCandidate(Candidate):
return f"{self.name} {self.version}"
def __repr__(self) -> str:
return "{class_name}({link!r})".format(
class_name=self.__class__.__name__,
link=str(self._link),
)
return f"{self.__class__.__name__}({str(self._link)!r})"
def __hash__(self) -> int:
return hash((self.__class__, self._link))
@ -354,10 +351,7 @@ class AlreadyInstalledCandidate(Candidate):
return str(self.dist)
def __repr__(self) -> str:
return "{class_name}({distribution!r})".format(
class_name=self.__class__.__name__,
distribution=self.dist,
)
return f"{self.__class__.__name__}({self.dist!r})"
def __hash__(self) -> int:
return hash((self.__class__, self.name, self.version))
@ -455,11 +449,7 @@ class ExtrasCandidate(Candidate):
return "{}[{}] {}".format(name, ",".join(self.extras), rest)
def __repr__(self) -> str:
return "{class_name}(base={base!r}, extras={extras!r})".format(
class_name=self.__class__.__name__,
base=self.base,
extras=self.extras,
)
return f"{self.__class__.__name__}(base={self.base!r}, extras={self.extras!r})"
def __hash__(self) -> int:
return hash((self.base, self.extras))

View File

@ -753,8 +753,8 @@ class Factory:
info = "the requested packages"
msg = (
"Cannot install {} because these package versions "
"have conflicting dependencies.".format(info)
f"Cannot install {info} because these package versions "
"have conflicting dependencies."
)
logger.critical(msg)
msg = "\nThe conflict is caused by:"

View File

@ -15,10 +15,7 @@ class ExplicitRequirement(Requirement):
return str(self.candidate)
def __repr__(self) -> str:
return "{class_name}({candidate!r})".format(
class_name=self.__class__.__name__,
candidate=self.candidate,
)
return f"{self.__class__.__name__}({self.candidate!r})"
@property
def project_name(self) -> NormalizedName:
@ -50,10 +47,7 @@ class SpecifierRequirement(Requirement):
return str(self._ireq.req)
def __repr__(self) -> str:
return "{class_name}({requirement!r})".format(
class_name=self.__class__.__name__,
requirement=str(self._ireq.req),
)
return f"{self.__class__.__name__}({str(self._ireq.req)!r})"
@property
def project_name(self) -> NormalizedName:
@ -116,10 +110,7 @@ class RequiresPythonRequirement(Requirement):
return f"Python {self.specifier}"
def __repr__(self) -> str:
return "{class_name}({specifier!r})".format(
class_name=self.__class__.__name__,
specifier=str(self.specifier),
)
return f"{self.__class__.__name__}({str(self.specifier)!r})"
@property
def project_name(self) -> NormalizedName:
@ -155,10 +146,7 @@ class UnsatisfiableRequirement(Requirement):
return f"{self._name} (unavailable)"
def __repr__(self) -> str:
return "{class_name}({name!r})".format(
class_name=self.__class__.__name__,
name=str(self._name),
)
return f"{self.__class__.__name__}({str(self._name)!r})"
@property
def project_name(self) -> NormalizedName:

View File

@ -175,11 +175,6 @@ class Resolver(BaseResolver):
req_set.add_named_requirement(ireq)
reqs = req_set.all_requirements
self.factory.preparer.prepare_linked_requirements_more(reqs)
for req in reqs:
req.prepared = True
req.needs_more_preparation = False
return req_set
def get_installation_order(

View File

@ -77,11 +77,7 @@ def get_pip_version() -> str:
pip_pkg_dir = os.path.join(os.path.dirname(__file__), "..", "..")
pip_pkg_dir = os.path.abspath(pip_pkg_dir)
return "pip {} from {} (python {})".format(
__version__,
pip_pkg_dir,
get_major_minor_version(),
)
return f"pip {__version__} from {pip_pkg_dir} (python {get_major_minor_version()})"
def normalize_version_info(py_version_info: Tuple[int, ...]) -> Tuple[int, int, int]:
@ -145,9 +141,9 @@ def rmtree(
)
if sys.version_info >= (3, 12):
# See https://docs.python.org/3.12/whatsnew/3.12.html#shutil.
shutil.rmtree(dir, onexc=handler)
shutil.rmtree(dir, onexc=handler) # type: ignore
else:
shutil.rmtree(dir, onerror=handler)
shutil.rmtree(dir, onerror=handler) # type: ignore
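The two ignores are needed because one handler cannot statically satisfy both signatures: an `onerror` callback receives `(function, path, excinfo)` where `excinfo` is a `sys.exc_info()` triple, while the 3.12+ `onexc` callback receives the exception instance itself in that slot. A condensed illustration with a placeholder handler and directory name:

import shutil
import sys
from typing import Any

def _handler(function: Any, path: Any, exc_info: Any) -> None:
    # Third argument: a (type, value, traceback) triple under onerror,
    # the exception instance under onexc (Python 3.12+).
    pass

if sys.version_info >= (3, 12):
    shutil.rmtree("some_dir", onexc=_handler)  # type: ignore
else:
    shutil.rmtree("some_dir", onerror=_handler)  # type: ignore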
def _onerror_ignore(*_args: Any) -> None:
@ -279,13 +275,13 @@ def strtobool(val: str) -> int:
def format_size(bytes: float) -> str:
if bytes > 1000 * 1000:
return "{:.1f} MB".format(bytes / 1000.0 / 1000)
return f"{bytes / 1000.0 / 1000:.1f} MB"
elif bytes > 10 * 1000:
return "{} kB".format(int(bytes / 1000))
return f"{int(bytes / 1000)} kB"
elif bytes > 1000:
return "{:.1f} kB".format(bytes / 1000.0)
return f"{bytes / 1000.0:.1f} kB"
else:
return "{} bytes".format(int(bytes))
return f"{int(bytes)} bytes"
def tabulate(rows: Iterable[Iterable[Any]]) -> Tuple[List[str], List[int]]:
@ -522,9 +518,7 @@ def redact_netloc(netloc: str) -> str:
else:
user = urllib.parse.quote(user)
password = ":****"
return "{user}{password}@{netloc}".format(
user=user, password=password, netloc=netloc
)
return f"{user}{password}@{netloc}"
def _transform_url(
@ -592,7 +586,7 @@ class HiddenText:
self.redacted = redacted
def __repr__(self) -> str:
return "<HiddenText {!r}>".format(str(self))
return f"<HiddenText {str(self)!r}>"
def __str__(self) -> str:
return self.redacted

View File

@ -28,7 +28,7 @@ def parse_wheel(wheel_zip: ZipFile, name: str) -> Tuple[str, Message]:
metadata = wheel_metadata(wheel_zip, info_dir)
version = wheel_version(metadata)
except UnsupportedWheel as e:
raise UnsupportedWheel("{} has an invalid wheel, {}".format(name, str(e)))
raise UnsupportedWheel(f"{name} has an invalid wheel, {str(e)}")
check_compatibility(version, name)
@ -60,9 +60,7 @@ def wheel_dist_info_dir(source: ZipFile, name: str) -> str:
canonical_name = canonicalize_name(name)
if not info_dir_name.startswith(canonical_name):
raise UnsupportedWheel(
".dist-info directory {!r} does not start with {!r}".format(
info_dir, canonical_name
)
f".dist-info directory {info_dir!r} does not start with {canonical_name!r}"
)
return info_dir

View File

@ -405,9 +405,9 @@ class VersionControl:
scheme, netloc, path, query, frag = urllib.parse.urlsplit(url)
if "+" not in scheme:
raise ValueError(
"Sorry, {!r} is a malformed VCS url. "
f"Sorry, {url!r} is a malformed VCS url. "
"The format is <vcs>+<protocol>://<url>, "
"e.g. svn+http://myrepo/svn/MyApp#egg=MyApp".format(url)
"e.g. svn+http://myrepo/svn/MyApp#egg=MyApp"
)
# Remove the vcs prefix.
scheme = scheme.split("+", 1)[1]
@ -417,9 +417,9 @@ class VersionControl:
path, rev = path.rsplit("@", 1)
if not rev:
raise InstallationError(
"The URL {!r} has an empty revision (after @) "
f"The URL {url!r} has an empty revision (after @) "
"which is not supported. Include a revision after @ "
"or remove @ from the URL.".format(url)
"or remove @ from the URL."
)
url = urllib.parse.urlunsplit((scheme, netloc, path, query, ""))
return url, rev, user_pass
@ -566,7 +566,7 @@ class VersionControl:
self.name,
url,
)
response = ask_path_exists("What to do? {}".format(prompt[0]), prompt[1])
response = ask_path_exists(f"What to do? {prompt[0]}", prompt[1])
if response == "a":
sys.exit(-1)

View File

@ -3,7 +3,6 @@
import logging
import os.path
import re
import shutil
from typing import Iterable, List, Optional, Tuple
@ -25,23 +24,12 @@ from pip._internal.utils.setuptools_build import make_setuptools_clean_args
from pip._internal.utils.subprocess import call_subprocess
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.urls import path_to_url
from pip._internal.vcs import vcs
logger = logging.getLogger(__name__)
_egg_info_re = re.compile(r"([a-z0-9_.]+)-([a-z0-9_.!+-]+)", re.IGNORECASE)
BuildResult = Tuple[List[InstallRequirement], List[InstallRequirement]]
def _contains_egg_info(s: str) -> bool:
"""Determine whether the string looks like an egg_info.
:param s: The string to parse. E.g. foo-2.1
"""
return bool(_egg_info_re.search(s))
def _should_build(
req: InstallRequirement,
need_wheel: bool,
@ -87,68 +75,20 @@ def should_build_for_install_command(
return _should_build(req, need_wheel=False)
def _should_cache(
req: InstallRequirement,
) -> Optional[bool]:
"""
Return whether a built InstallRequirement can be stored in the persistent
wheel cache, assuming the wheel cache is available, and _should_build()
has determined a wheel needs to be built.
"""
if req.editable or not req.source_dir:
# never cache editable requirements
return False
if req.link and req.link.is_vcs:
# VCS checkout. Do not cache
# unless it points to an immutable commit hash.
assert not req.editable
assert req.source_dir
vcs_backend = vcs.get_backend_for_scheme(req.link.scheme)
assert vcs_backend
if vcs_backend.is_immutable_rev_checkout(req.link.url, req.source_dir):
return True
return False
assert req.link
base, ext = req.link.splitext()
if _contains_egg_info(base):
return True
# Otherwise, do not cache.
return False
def _get_cache_dir(
req: InstallRequirement,
wheel_cache: WheelCache,
) -> str:
"""Return the persistent or temporary cache directory where the built
wheel needs to be stored.
"""
cache_available = bool(wheel_cache.cache_dir)
assert req.link
if cache_available and _should_cache(req):
cache_dir = wheel_cache.get_path_for_link(req.link)
else:
cache_dir = wheel_cache.get_ephem_path_for_link(req.link)
return cache_dir
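Both removed helpers move behind `WheelCache.resolve_cache_dir(req)`, called later in `build()`. The decision they encoded reduces to roughly the following standalone sketch (not pip's API; `should_cache` stands for the relocated eligibility predicate covering editables, VCS immutability, and egg-info-shaped filenames):

def resolve_cache_dir_sketch(req, wheel_cache) -> str:
    # Persistent cache path only when a cache dir is configured and the
    # build output is safe to reuse; otherwise an ephemeral per-run path.
    if bool(wheel_cache.cache_dir) and should_cache(req):
        return wheel_cache.get_path_for_link(req.link)
    return wheel_cache.get_ephem_path_for_link(req.link)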
def _verify_one(req: InstallRequirement, wheel_path: str) -> None:
canonical_name = canonicalize_name(req.name or "")
w = Wheel(os.path.basename(wheel_path))
if canonicalize_name(w.name) != canonical_name:
raise InvalidWheelFilename(
"Wheel has unexpected file name: expected {!r}, "
"got {!r}".format(canonical_name, w.name),
f"Wheel has unexpected file name: expected {canonical_name!r}, "
f"got {w.name!r}",
)
dist = get_wheel_distribution(FilesystemWheel(wheel_path), canonical_name)
dist_verstr = str(dist.version)
if canonicalize_version(dist_verstr) != canonicalize_version(w.version):
raise InvalidWheelFilename(
"Wheel has unexpected file name: expected {!r}, "
"got {!r}".format(dist_verstr, w.version),
f"Wheel has unexpected file name: expected {dist_verstr!r}, "
f"got {w.version!r}",
)
metadata_version_value = dist.metadata_version
if metadata_version_value is None:
@ -160,8 +100,7 @@ def _verify_one(req: InstallRequirement, wheel_path: str) -> None:
raise UnsupportedWheel(msg)
if metadata_version >= Version("1.2") and not isinstance(dist.version, Version):
raise UnsupportedWheel(
"Metadata 1.2 mandates PEP 440 version, "
"but {!r} is not".format(dist_verstr)
f"Metadata 1.2 mandates PEP 440 version, but {dist_verstr!r} is not"
)
@ -316,7 +255,7 @@ def build(
build_successes, build_failures = [], []
for req in requirements:
assert req.name
cache_dir = _get_cache_dir(req, wheel_cache)
cache_dir = wheel_cache.resolve_cache_dir(req)
wheel_file = _build_one(
req,
cache_dir,

View File

@ -14,6 +14,7 @@ from hashlib import sha256
from pathlib import Path
from textwrap import dedent
from typing import (
TYPE_CHECKING,
Any,
AnyStr,
Callable,
@ -58,6 +59,9 @@ from tests.lib import (
from tests.lib.server import MockServer, make_mock_server
from tests.lib.venv import VirtualEnvironment, VirtualEnvironmentType
if TYPE_CHECKING:
from pip._vendor.typing_extensions import Self
def pytest_addoption(parser: Parser) -> None:
parser.addoption(
@ -141,7 +145,7 @@ def pytest_collection_modifyitems(config: Config, items: List[pytest.Function])
if "script" in item.fixturenames:
raise RuntimeError(
"Cannot use the ``script`` funcarg in a unit test: "
"(filename = {}, item = {})".format(module_path, item)
f"(filename = {module_path}, item = {item})"
)
else:
raise RuntimeError(f"Unknown test type (filename = {module_path})")
@ -705,6 +709,9 @@ class FakePackage:
requires_dist: Tuple[str, ...] = ()
# This will override the Name specified in the actual dist's METADATA.
metadata_name: Optional[str] = None
# Whether to delete the file this points to, which causes any attempt to fetch this
# package to fail unless it is processed as a metadata-only dist.
delete_linked_file: bool = False
def metadata_filename(self) -> str:
"""This is specified by PEP 658."""
@ -794,6 +801,27 @@ def fake_packages() -> Dict[str, List[FakePackage]]:
("simple==1.0",),
),
],
"complex-dist": [
FakePackage(
"complex-dist",
"0.1",
"complex_dist-0.1-py2.py3-none-any.whl",
MetadataKind.Unhashed,
# Validate that the wheel isn't fetched if metadata is available and
# --dry-run is on, when the metadata presents no hash itself.
delete_linked_file=True,
),
],
"corruptwheel": [
FakePackage(
"corruptwheel",
"1.0",
"corruptwheel-1.0-py2.py3-none-any.whl",
# Validate that the wheel isn't fetched if metadata is available and
# --dry-run is on, when the metadata *does* present a hash.
MetadataKind.Sha256,
),
],
"has-script": [
# Ensure we check PEP 658 metadata hashing errors for wheel files.
FakePackage(
@ -879,10 +907,10 @@ def html_index_for_packages(
f' <a href="{package_link.filename}" {package_link.generate_additional_tag()}>{package_link.filename}</a><br/>' # noqa: E501
)
# (3.2) Copy over the corresponding file in `shared_data.packages`.
shutil.copy(
shared_data.packages / package_link.filename,
pkg_subdir / package_link.filename,
)
cached_file = shared_data.packages / package_link.filename
new_file = pkg_subdir / package_link.filename
if not package_link.delete_linked_file:
shutil.copy(cached_file, new_file)
# (3.3) Write a metadata file, if applicable.
if package_link.metadata != MetadataKind.NoFile:
with open(pkg_subdir / package_link.metadata_filename(), "wb") as f:
@ -941,7 +969,7 @@ def html_index_with_onetime_server(
"""
class InDirectoryServer(http.server.ThreadingHTTPServer):
def finish_request(self, request: Any, client_address: Any) -> None:
def finish_request(self: "Self", request: Any, client_address: Any) -> None:
self.RequestHandlerClass(
request,
client_address,

View File

@ -119,7 +119,7 @@ def test_check_complicated_name_missing(script: PipTestEnvironment) -> None:
# Without dependency
result = script.pip("install", "--no-index", package_a_path, "--no-deps")
assert "Successfully installed package-A-1.0" in result.stdout, str(result)
assert "Successfully installed package-a-1.0" in result.stdout, str(result)
result = script.pip("check", expect_error=True)
expected_lines = ("package-a 1.0 requires dependency-b, which is not installed.",)
@ -142,7 +142,7 @@ def test_check_complicated_name_broken(script: PipTestEnvironment) -> None:
# With broken dependency
result = script.pip("install", "--no-index", package_a_path, "--no-deps")
assert "Successfully installed package-A-1.0" in result.stdout, str(result)
assert "Successfully installed package-a-1.0" in result.stdout, str(result)
result = script.pip(
"install",
@ -175,7 +175,7 @@ def test_check_complicated_name_clean(script: PipTestEnvironment) -> None:
)
result = script.pip("install", "--no-index", package_a_path, "--no-deps")
assert "Successfully installed package-A-1.0" in result.stdout, str(result)
assert "Successfully installed package-a-1.0" in result.stdout, str(result)
result = script.pip(
"install",
@ -203,7 +203,7 @@ def test_check_considers_conditional_reqs(script: PipTestEnvironment) -> None:
)
result = script.pip("install", "--no-index", package_a_path, "--no-deps")
assert "Successfully installed package-A-1.0" in result.stdout, str(result)
assert "Successfully installed package-a-1.0" in result.stdout, str(result)
result = script.pip("check", expect_error=True)
expected_lines = ("package-a 1.0 requires dependency-b, which is not installed.",)

View File

@ -23,7 +23,7 @@ def test_entrypoints_work(entrypoint: str, script: PipTestEnvironment) -> None:
fake_pkg.mkdir()
fake_pkg.joinpath("setup.py").write_text(
dedent(
"""
f"""
from setuptools import setup
setup(
@ -31,13 +31,11 @@ def test_entrypoints_work(entrypoint: str, script: PipTestEnvironment) -> None:
version="0.1.0",
entry_points={{
"console_scripts": [
{!r}
{entrypoint!r}
]
}}
)
""".format(
entrypoint
)
"""
)
)

View File

@ -400,7 +400,7 @@ def test_completion_path_after_option(
def test_completion_uses_same_executable_name(
autocomplete_script: PipTestEnvironment, flag: str, deprecated_python: bool
) -> None:
executable_name = "pip{}".format(sys.version_info[0])
executable_name = f"pip{sys.version_info[0]}"
# Deprecated python versions produce an extra deprecation warning
result = autocomplete_script.run(
executable_name,

View File

@ -68,7 +68,7 @@ def test_debug__tags(script: PipTestEnvironment, args: List[str]) -> None:
stdout = result.stdout
tags = compatibility_tags.get_supported()
expected_tag_header = "Compatible tags: {}".format(len(tags))
expected_tag_header = f"Compatible tags: {len(tags)}"
assert expected_tag_header in stdout
show_verbose_note = "--verbose" not in args

View File

@ -166,13 +166,11 @@ def test_freeze_with_invalid_names(script: PipTestEnvironment) -> None:
with open(egg_info_path, "w") as egg_info_file:
egg_info_file.write(
textwrap.dedent(
"""\
f"""\
Metadata-Version: 1.0
Name: {}
Name: {pkgname}
Version: 1.0
""".format(
pkgname
)
"""
)
)
@ -221,12 +219,10 @@ def test_freeze_editable_not_vcs(script: PipTestEnvironment) -> None:
# We need to apply os.path.normcase() to the path since that is what
# the freeze code does.
expected = textwrap.dedent(
"""\
f"""\
...# Editable install with no version control (version-pkg==0.1)
-e {}
...""".format(
os.path.normcase(pkg_path)
)
-e {os.path.normcase(pkg_path)}
..."""
)
_check_output(result.stdout, expected)
@ -248,12 +244,10 @@ def test_freeze_editable_git_with_no_remote(
# We need to apply os.path.normcase() to the path since that is what
# the freeze code does.
expected = textwrap.dedent(
"""\
f"""\
...# Editable Git install with no remote (version-pkg==0.1)
-e {}
...""".format(
os.path.normcase(pkg_path)
)
-e {os.path.normcase(pkg_path)}
..."""
)
_check_output(result.stdout, expected)
@ -653,9 +647,9 @@ def test_freeze_with_requirement_option_file_url_egg_not_installed(
expect_stderr=True,
)
expected_err = (
"WARNING: Requirement file [requirements.txt] contains {}, "
f"WARNING: Requirement file [requirements.txt] contains {url}, "
"but package 'Does.Not-Exist' is not installed\n"
).format(url)
)
if deprecated_python:
assert expected_err in result.stderr
else:

View File

@ -106,10 +106,10 @@ def test_pep518_refuses_conflicting_requires(
assert (
result.returncode != 0
and (
"Some build dependencies for {url} conflict "
f"Some build dependencies for {project_dir.as_uri()} conflict "
"with PEP 517/518 supported "
"requirements: setuptools==1.0 is incompatible with "
"setuptools>=40.8.0.".format(url=project_dir.as_uri())
"setuptools>=40.8.0."
)
in result.stderr
), str(result)
@ -595,8 +595,8 @@ def test_hashed_install_success(
with requirements_file(
"simple2==1.0 --hash=sha256:9336af72ca661e6336eb87bc7de3e8844d853e"
"3848c2b9bbd2e8bf01db88c2c7\n"
"{simple} --hash=sha256:393043e672415891885c9a2a0929b1af95fb866d6c"
"a016b42d2e6ce53619b653".format(simple=file_url),
f"{file_url} --hash=sha256:393043e672415891885c9a2a0929b1af95fb866d6c"
"a016b42d2e6ce53619b653",
tmpdir,
) as reqs_file:
script.pip_install_local("-r", reqs_file.resolve())
@ -1735,7 +1735,7 @@ def test_install_builds_wheels(script: PipTestEnvironment, data: TestData) -> No
# into the cache
assert wheels != [], str(res)
assert wheels == [
"Upper-2.0-py{}-none-any.whl".format(sys.version_info[0]),
f"Upper-2.0-py{sys.version_info[0]}-none-any.whl",
]
@ -2071,7 +2071,7 @@ def test_install_conflict_results_in_warning(
# Install pkgA without its dependency
result1 = script.pip("install", "--no-index", pkgA_path, "--no-deps")
assert "Successfully installed pkgA-1.0" in result1.stdout, str(result1)
assert "Successfully installed pkga-1.0" in result1.stdout, str(result1)
# Then install an incorrect version of the dependency
result2 = script.pip(
@ -2081,7 +2081,7 @@ def test_install_conflict_results_in_warning(
allow_stderr_error=True,
)
assert "pkga 1.0 requires pkgb==1.0" in result2.stderr, str(result2)
assert "Successfully installed pkgB-2.0" in result2.stdout, str(result2)
assert "Successfully installed pkgb-2.0" in result2.stdout, str(result2)
def test_install_conflict_warning_can_be_suppressed(
@ -2101,11 +2101,11 @@ def test_install_conflict_warning_can_be_suppressed(
# Install pkgA without its dependency
result1 = script.pip("install", "--no-index", pkgA_path, "--no-deps")
assert "Successfully installed pkgA-1.0" in result1.stdout, str(result1)
assert "Successfully installed pkga-1.0" in result1.stdout, str(result1)
# Then install an incorrect version of the dependency; suppressing warning
result2 = script.pip("install", "--no-index", pkgB_path, "--no-warn-conflicts")
assert "Successfully installed pkgB-2.0" in result2.stdout, str(result2)
assert "Successfully installed pkgb-2.0" in result2.stdout, str(result2)
def test_target_install_ignores_distutils_config_install_prefix(
@ -2387,7 +2387,7 @@ def test_install_verify_package_name_normalization(
assert "Successfully installed simple-package" in result.stdout
result = script.pip("install", package_name)
assert "Requirement already satisfied: {}".format(package_name) in result.stdout
assert f"Requirement already satisfied: {package_name}" in result.stdout
def test_install_logs_pip_version_in_debug(

View File

@ -28,7 +28,7 @@ def test_check_install_canonicalization(script: PipTestEnvironment) -> None:
# Let's install pkgA without its dependency
result = script.pip("install", "--no-index", pkga_path, "--no-deps")
assert "Successfully installed pkgA-1.0" in result.stdout, str(result)
assert "Successfully installed pkga-1.0" in result.stdout, str(result)
# Install the first missing dependency. Only an error for the
# second dependency should remain.

View File

@ -184,12 +184,10 @@ def test_config_file_override_stack(
config_file.write_text(
textwrap.dedent(
"""\
f"""\
[global]
index-url = {}/simple1
""".format(
base_address
)
index-url = {base_address}/simple1
"""
)
)
script.pip("install", "-vvv", "INITools", expect_error=True)
@ -197,14 +195,12 @@ def test_config_file_override_stack(
config_file.write_text(
textwrap.dedent(
"""\
f"""\
[global]
index-url = {address}/simple1
index-url = {base_address}/simple1
[install]
index-url = {address}/simple2
""".format(
address=base_address
)
index-url = {base_address}/simple2
"""
)
)
script.pip("install", "-vvv", "INITools", expect_error=True)

View File

@ -41,13 +41,11 @@ def test_find_links_requirements_file_relative_path(
"""Test find-links as a relative path to a reqs file."""
script.scratch_path.joinpath("test-req.txt").write_text(
textwrap.dedent(
"""
f"""
--no-index
--find-links={}
--find-links={data.packages.as_posix()}
parent==0.1
""".format(
data.packages.as_posix()
)
"""
)
)
result = script.pip(

View File

@ -0,0 +1,239 @@
import json
import re
from pathlib import Path
from typing import Any, Callable, Dict, Iterator, List, Tuple
import pytest
from pip._vendor.packaging.requirements import Requirement
from pip._internal.models.direct_url import DirectUrl
from pip._internal.utils.urls import path_to_url
from tests.lib import (
PipTestEnvironment,
TestPipResult,
)
@pytest.fixture(scope="function")
def install_with_generated_html_index(
script: PipTestEnvironment,
html_index_for_packages: Path,
tmpdir: Path,
) -> Callable[..., Tuple[TestPipResult, Dict[str, Any]]]:
"""Execute `pip download` against a generated PyPI index."""
output_file = tmpdir / "output_file.json"
def run_for_generated_index(
args: List[str],
*,
dry_run: bool = True,
allow_error: bool = False,
) -> Tuple[TestPipResult, Dict[str, Any]]:
"""
Produce a PyPI directory structure pointing to the specified packages, then
execute `pip install --report ... -i ...` pointing to our generated index.
"""
pip_args = [
"install",
*(("--dry-run",) if dry_run else ()),
"--ignore-installed",
"--report",
str(output_file),
"-i",
path_to_url(str(html_index_for_packages)),
*args,
]
result = script.pip(*pip_args, allow_error=allow_error)
try:
with open(output_file, "rb") as f:
report = json.load(f)
except FileNotFoundError:
if allow_error:
report = {}
else:
raise
return (result, report)
return run_for_generated_index
def iter_dists(report: Dict[str, Any]) -> Iterator[Tuple[Requirement, DirectUrl]]:
"""Parse a (req,url) tuple from each installed dist in the --report json."""
for inst in report["install"]:
metadata = inst["metadata"]
name = metadata["name"]
version = metadata["version"]
req = Requirement(f"{name}=={version}")
direct_url = DirectUrl.from_dict(inst["download_info"])
yield (req, direct_url)
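`iter_dists` reads only a small slice of the `--report` JSON. Reduced to those fields, a report looks like the sample below (values invented); `archive_info` is included because `DirectUrl.from_dict` expects exactly one info member alongside the URL:

sample_report = {
    "install": [
        {
            "metadata": {"name": "simple", "version": "1.0"},
            "download_info": {
                "url": "https://example.com/simple-1.0.tar.gz",
                "archive_info": {},
            },
        }
    ]
}

for req, direct_url in iter_dists(sample_report):
    print(req, direct_url.url)  # simple==1.0 https://example.com/simple-1.0.tar.gz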
@pytest.mark.parametrize(
"requirement_to_install, expected_outputs",
[
("simple2==1.0", ["simple2==1.0", "simple==1.0"]),
("simple==2.0", ["simple==2.0"]),
(
"colander",
["colander==0.9.9", "translationstring==1.1"],
),
(
"compilewheel",
["compilewheel==1.0", "simple==1.0"],
),
],
)
def test_install_with_metadata(
install_with_generated_html_index: Callable[
..., Tuple[TestPipResult, Dict[str, Any]]
],
requirement_to_install: str,
expected_outputs: List[str],
) -> None:
"""Verify that if a data-dist-info-metadata attribute is present, then it is used
instead of the actual dist's METADATA."""
_, report = install_with_generated_html_index(
[requirement_to_install],
)
installed = sorted(str(r) for r, _ in iter_dists(report))
assert installed == expected_outputs
@pytest.mark.parametrize(
"requirement_to_install, real_hash",
[
(
"simple==3.0",
"95e0f200b6302989bcf2cead9465cf229168295ea330ca30d1ffeab5c0fed996",
),
(
"has-script",
"16ba92d7f6f992f6de5ecb7d58c914675cf21f57f8e674fb29dcb4f4c9507e5b",
),
],
)
def test_incorrect_metadata_hash(
install_with_generated_html_index: Callable[
..., Tuple[TestPipResult, Dict[str, Any]]
],
requirement_to_install: str,
real_hash: str,
) -> None:
"""Verify that if a hash for data-dist-info-metadata is provided, it must match the
actual hash of the metadata file."""
result, _ = install_with_generated_html_index(
[requirement_to_install],
allow_error=True,
)
assert result.returncode != 0
expected_msg = f"""\
Expected sha256 WRONG-HASH
Got {real_hash}"""
assert expected_msg in result.stderr
@pytest.mark.parametrize(
"requirement_to_install, expected_url",
[
("simple2==2.0", "simple2-2.0.tar.gz.metadata"),
("priority", "priority-1.0-py2.py3-none-any.whl.metadata"),
],
)
def test_metadata_not_found(
install_with_generated_html_index: Callable[
..., Tuple[TestPipResult, Dict[str, Any]]
],
requirement_to_install: str,
expected_url: str,
) -> None:
"""Verify that if a data-dist-info-metadata attribute is provided, that pip will
fetch the .metadata file at the location specified by PEP 658, and error
if unavailable."""
result, _ = install_with_generated_html_index(
[requirement_to_install],
allow_error=True,
)
assert result.returncode != 0
expected_re = re.escape(expected_url)
pattern = re.compile(
f"ERROR: 404 Client Error: FileNotFoundError for url:.*{expected_re}"
)
assert pattern.search(result.stderr), (pattern, result.stderr)
def test_produces_error_for_mismatched_package_name_in_metadata(
install_with_generated_html_index: Callable[
..., Tuple[TestPipResult, Dict[str, Any]]
],
) -> None:
"""Verify that the package name from the metadata matches the requested package."""
result, _ = install_with_generated_html_index(
["simple2==3.0"],
allow_error=True,
)
assert result.returncode != 0
assert (
"simple2-3.0.tar.gz has inconsistent Name: expected 'simple2', but metadata "
"has 'not-simple2'"
) in result.stdout
@pytest.mark.parametrize(
"requirement",
(
"requires-simple-extra==0.1",
"REQUIRES_SIMPLE-EXTRA==0.1",
"REQUIRES....simple-_-EXTRA==0.1",
),
)
def test_canonicalizes_package_name_before_verifying_metadata(
install_with_generated_html_index: Callable[
..., Tuple[TestPipResult, Dict[str, Any]]
],
requirement: str,
) -> None:
"""Verify that the package name from the command line and the package's
METADATA are both canonicalized before comparison, while the name from the METADATA
is always used verbatim to represent the installed candidate in --report.
Regression test for https://github.com/pypa/pip/issues/12038
"""
_, report = install_with_generated_html_index(
[requirement],
)
reqs = [str(r) for r, _ in iter_dists(report)]
assert reqs == ["Requires_Simple.Extra==0.1"]
@pytest.mark.parametrize(
"requirement,err_string",
(
# It's important that we verify pip won't even attempt to fetch the file, so we
# construct an input that will cause it to error if it tries at all.
(
"complex-dist==0.1",
"Could not install packages due to an OSError: [Errno 2] No such file or directory", # noqa: E501
),
("corruptwheel==1.0", ".whl is invalid."),
),
)
def test_dry_run_avoids_downloading_metadata_only_dists(
install_with_generated_html_index: Callable[
..., Tuple[TestPipResult, Dict[str, Any]]
],
requirement: str,
err_string: str,
) -> None:
"""Verify that the underlying dist files are not downloaded at all when
`install --dry-run` is used to resolve dists with PEP 658 metadata."""
_, report = install_with_generated_html_index(
[requirement],
)
assert [requirement] == [str(r) for r, _ in iter_dists(report)]
result, _ = install_with_generated_html_index(
[requirement],
dry_run=False,
allow_error=True,
)
assert result.returncode != 0
assert err_string in result.stderr

View File

@ -95,7 +95,7 @@ def test_requirements_file(script: PipTestEnvironment) -> None:
result.did_create(script.site_packages / "INITools-0.2.dist-info")
result.did_create(script.site_packages / "initools")
assert result.files_created[script.site_packages / other_lib_name].dir
fn = "{}-{}.dist-info".format(other_lib_name, other_lib_version)
fn = f"{other_lib_name}-{other_lib_version}.dist-info"
assert result.files_created[script.site_packages / fn].dir
@ -260,13 +260,13 @@ def test_respect_order_in_requirements_file(
assert (
"parent" in downloaded[0]
), 'First download should be "parent" but was "{}"'.format(downloaded[0])
), f'First download should be "parent" but was "{downloaded[0]}"'
assert (
"child" in downloaded[1]
), 'Second download should be "child" but was "{}"'.format(downloaded[1])
), f'Second download should be "child" but was "{downloaded[1]}"'
assert (
"simple" in downloaded[2]
), 'Third download should be "simple" but was "{}"'.format(downloaded[2])
), f'Third download should be "simple" but was "{downloaded[2]}"'
def test_install_local_editable_with_extras(
@ -620,7 +620,7 @@ def test_install_distribution_full_union(
result = script.pip_install_local(
to_install, f"{to_install}[bar]", f"{to_install}[baz]"
)
assert "Building wheel for LocalExtras" in result.stdout
assert "Building wheel for localextras" in result.stdout
result.did_create(script.site_packages / "simple")
result.did_create(script.site_packages / "singlemodule.py")

View File

@ -169,9 +169,9 @@ def get_header_scheme_path_for_script(
) -> Path:
command = (
"from pip._internal.locations import get_scheme;"
"scheme = get_scheme({!r});"
f"scheme = get_scheme({dist_name!r});"
"print(scheme.headers);"
).format(dist_name)
)
result = script.run("python", "-c", command).stdout
return Path(result.strip())

View File

@ -1185,7 +1185,7 @@ def test_new_resolver_presents_messages_when_backtracking_a_lot(
for index in range(1, N + 1):
A_version = f"{index}.0.0"
B_version = f"{index}.0.0"
C_version = "{index_minus_one}.0.0".format(index_minus_one=index - 1)
C_version = f"{index - 1}.0.0"
depends = ["B == " + B_version]
if index != 1:

View File

@ -71,8 +71,8 @@ def test_new_resolver_conflict_constraints_file(
def test_new_resolver_requires_python_error(script: PipTestEnvironment) -> None:
compatible_python = ">={0.major}.{0.minor}".format(sys.version_info)
incompatible_python = "<{0.major}.{0.minor}".format(sys.version_info)
compatible_python = f">={sys.version_info.major}.{sys.version_info.minor}"
incompatible_python = f"<{sys.version_info.major}.{sys.version_info.minor}"
pkga = create_test_package_with_setup(
script,
@ -99,7 +99,7 @@ def test_new_resolver_requires_python_error(script: PipTestEnvironment) -> None:
def test_new_resolver_checks_requires_python_before_dependencies(
script: PipTestEnvironment,
) -> None:
incompatible_python = "<{0.major}.{0.minor}".format(sys.version_info)
incompatible_python = f"<{sys.version_info.major}.{sys.version_info.minor}"
pkg_dep = create_basic_wheel_for_package(
script,

View File

@ -24,18 +24,11 @@ def _create_find_links(script: PipTestEnvironment) -> _FindLinks:
index_html = script.scratch_path / "index.html"
index_html.write_text(
"""
f"""
<!DOCTYPE html>
<a href="{sdist_url}#sha256={sdist_hash}">{sdist_path.stem}</a>
<a href="{wheel_url}#sha256={wheel_hash}">{wheel_path.stem}</a>
""".format(
sdist_url=sdist_path.as_uri(),
sdist_hash=sdist_hash,
sdist_path=sdist_path,
wheel_url=wheel_path.as_uri(),
wheel_hash=wheel_hash,
wheel_path=wheel_path,
).strip()
<a href="{sdist_path.as_uri()}#sha256={sdist_hash}">{sdist_path.stem}</a>
<a href="{wheel_path.as_uri()}#sha256={wheel_hash}">{wheel_path.stem}</a>
""".strip()
)
return _FindLinks(index_html, sdist_hash, wheel_hash)
@ -99,9 +92,7 @@ def test_new_resolver_hash_intersect_from_constraint(
constraints_txt = script.scratch_path / "constraints.txt"
constraints_txt.write_text(
"base==0.1.0 --hash=sha256:{sdist_hash}".format(
sdist_hash=find_links.sdist_hash,
),
f"base==0.1.0 --hash=sha256:{find_links.sdist_hash}",
)
requirements_txt = script.scratch_path / "requirements.txt"
requirements_txt.write_text(
@ -200,13 +191,10 @@ def test_new_resolver_hash_intersect_empty_from_constraint(
constraints_txt = script.scratch_path / "constraints.txt"
constraints_txt.write_text(
"""
base==0.1.0 --hash=sha256:{sdist_hash}
base==0.1.0 --hash=sha256:{wheel_hash}
""".format(
sdist_hash=find_links.sdist_hash,
wheel_hash=find_links.wheel_hash,
),
f"""
base==0.1.0 --hash=sha256:{find_links.sdist_hash}
base==0.1.0 --hash=sha256:{find_links.wheel_hash}
""",
)
result = script.pip(
@ -240,19 +228,15 @@ def test_new_resolver_hash_requirement_and_url_constraint_can_succeed(
requirements_txt = script.scratch_path / "requirements.txt"
requirements_txt.write_text(
"""
f"""
base==0.1.0 --hash=sha256:{wheel_hash}
""".format(
wheel_hash=wheel_hash,
),
""",
)
constraints_txt = script.scratch_path / "constraints.txt"
constraint_text = "base @ {wheel_url}\n".format(wheel_url=wheel_path.as_uri())
constraint_text = f"base @ {wheel_path.as_uri()}\n"
if constrain_by_hash:
constraint_text += "base==0.1.0 --hash=sha256:{wheel_hash}\n".format(
wheel_hash=wheel_hash,
)
constraint_text += f"base==0.1.0 --hash=sha256:{wheel_hash}\n"
constraints_txt.write_text(constraint_text)
script.pip(
@ -280,19 +264,15 @@ def test_new_resolver_hash_requirement_and_url_constraint_can_fail(
requirements_txt = script.scratch_path / "requirements.txt"
requirements_txt.write_text(
"""
f"""
base==0.1.0 --hash=sha256:{other_hash}
""".format(
other_hash=other_hash,
),
""",
)
constraints_txt = script.scratch_path / "constraints.txt"
constraint_text = "base @ {wheel_url}\n".format(wheel_url=wheel_path.as_uri())
constraint_text = f"base @ {wheel_path.as_uri()}\n"
if constrain_by_hash:
constraint_text += "base==0.1.0 --hash=sha256:{other_hash}\n".format(
other_hash=other_hash,
)
constraint_text += f"base==0.1.0 --hash=sha256:{other_hash}\n"
constraints_txt.write_text(constraint_text)
result = script.pip(
@ -343,17 +323,12 @@ def test_new_resolver_hash_with_extras(script: PipTestEnvironment) -> None:
requirements_txt = script.scratch_path / "requirements.txt"
requirements_txt.write_text(
"""
f"""
child[extra]==0.1.0 --hash=sha256:{child_hash}
parent_with_extra==0.1.0 --hash=sha256:{parent_with_extra_hash}
parent_without_extra==0.1.0 --hash=sha256:{parent_without_extra_hash}
extra==0.1.0 --hash=sha256:{extra_hash}
""".format(
child_hash=child_hash,
parent_with_extra_hash=parent_with_extra_hash,
parent_without_extra_hash=parent_without_extra_hash,
extra_hash=extra_hash,
),
""",
)
script.pip(

View File

@ -58,12 +58,7 @@ def test_new_resolver_target_checks_compatibility_failure(
if platform:
args += ["--platform", platform]
args_tag = "{}{}-{}-{}".format(
implementation,
python_version,
abi,
platform,
)
args_tag = f"{implementation}{python_version}-{abi}-{platform}"
wheel_tag_matches = args_tag == fake_wheel_tag
result = script.pip(*args, expect_error=(not wheel_tag_matches))

View File

@ -159,9 +159,9 @@ def test_conflicting_pep517_backend_requirements(
expect_error=True,
)
msg = (
"Some build dependencies for {url} conflict with the backend "
f"Some build dependencies for {project_dir.as_uri()} conflict with the backend "
"dependencies: simplewheel==1.0 is incompatible with "
"simplewheel==2.0.".format(url=project_dir.as_uri())
"simplewheel==2.0."
)
assert result.returncode != 0 and msg in result.stderr, str(result)
@ -205,8 +205,8 @@ def test_validate_missing_pep517_backend_requirements(
expect_error=True,
)
msg = (
"Some build dependencies for {url} are missing: "
"'simplewheel==1.0', 'test_backend'.".format(url=project_dir.as_uri())
f"Some build dependencies for {project_dir.as_uri()} are missing: "
"'simplewheel==1.0', 'test_backend'."
)
assert result.returncode != 0 and msg in result.stderr, str(result)
@ -231,9 +231,9 @@ def test_validate_conflicting_pep517_backend_requirements(
expect_error=True,
)
msg = (
"Some build dependencies for {url} conflict with the backend "
f"Some build dependencies for {project_dir.as_uri()} conflict with the backend "
"dependencies: simplewheel==2.0 is incompatible with "
"simplewheel==1.0.".format(url=project_dir.as_uri())
"simplewheel==1.0."
)
assert result.returncode != 0 and msg in result.stderr, str(result)

View File

@ -604,9 +604,7 @@ def test_uninstall_without_record_fails(
"simple.dist==0.1'."
)
elif installer:
expected_error_message += " Hint: The package was installed by {}.".format(
installer
)
expected_error_message += f" Hint: The package was installed by {installer}."
assert result2.stderr.rstrip() == expected_error_message
assert_all_changes(result.files_after, result2, ignore_changes)

View File

@ -59,9 +59,7 @@ def test_pip_wheel_success(script: PipTestEnvironment, data: TestData) -> None:
wheel_file_path = script.scratch / wheel_file_name
assert re.search(
r"Created wheel for simple: "
r"filename={filename} size=\d+ sha256=[A-Fa-f0-9]{{64}}".format(
filename=re.escape(wheel_file_name)
),
rf"filename={re.escape(wheel_file_name)} size=\d+ sha256=[A-Fa-f0-9]{{64}}",
result.stdout,
)
assert re.search(r"^\s+Stored in directory: ", result.stdout, re.M)
@ -286,7 +284,7 @@ def test_wheel_package_with_latin1_setup(
pkg_to_wheel = data.packages.joinpath("SetupPyLatin1")
result = script.pip("wheel", pkg_to_wheel)
assert "Successfully built SetupPyUTF8" in result.stdout
assert "Successfully built setuppyutf8" in result.stdout
def test_pip_wheel_with_pep518_build_reqs(

View File

@ -747,7 +747,7 @@ class PipTestEnvironment(TestFileEnvironment):
for val in json.loads(ret.stdout)
}
expected = {(canonicalize_name(k), v) for k, v in kwargs.items()}
assert expected <= installed, "{!r} not all in {!r}".format(expected, installed)
assert expected <= installed, f"{expected!r} not all in {installed!r}"
def assert_not_installed(self, *args: str) -> None:
ret = self.pip("list", "--format=json")
@ -755,9 +755,7 @@ class PipTestEnvironment(TestFileEnvironment):
# None of the given names should be listed as installed, i.e. their
# intersection should be empty.
expected = {canonicalize_name(k) for k in args}
assert not (expected & installed), "{!r} contained in {!r}".format(
expected, installed
)
assert not (expected & installed), f"{expected!r} contained in {installed!r}"
# FIXME ScriptTest does something similar, but only within a single
@ -1028,7 +1026,7 @@ def _create_test_package_with_srcdir(
pkg_path.joinpath("__init__.py").write_text("")
subdir_path.joinpath("setup.py").write_text(
textwrap.dedent(
"""
f"""
from setuptools import setup, find_packages
setup(
name="{name}",
@ -1036,9 +1034,7 @@ def _create_test_package_with_srcdir(
packages=find_packages(),
package_dir={{"": "src"}},
)
""".format(
name=name
)
"""
)
)
return _vcs_add(dir_path, version_pkg_path, vcs)
@ -1052,7 +1048,7 @@ def _create_test_package(
_create_main_file(version_pkg_path, name=name, output="0.1")
version_pkg_path.joinpath("setup.py").write_text(
textwrap.dedent(
"""
f"""
from setuptools import setup, find_packages
setup(
name="{name}",
@ -1061,9 +1057,7 @@ def _create_test_package(
py_modules=["{name}"],
entry_points=dict(console_scripts=["{name}={name}:main"]),
)
""".format(
name=name
)
"""
)
)
return _vcs_add(dir_path, version_pkg_path, vcs)
@ -1137,7 +1131,7 @@ def urlsafe_b64encode_nopad(data: bytes) -> str:
def create_really_basic_wheel(name: str, version: str) -> bytes:
def digest(contents: bytes) -> str:
return "sha256={}".format(urlsafe_b64encode_nopad(sha256(contents).digest()))
return f"sha256={urlsafe_b64encode_nopad(sha256(contents).digest())}"
def add_file(path: str, text: str) -> None:
contents = text.encode("utf-8")
@ -1153,13 +1147,11 @@ def create_really_basic_wheel(name: str, version: str) -> bytes:
add_file(
f"{dist_info}/METADATA",
dedent(
"""\
f"""\
Metadata-Version: 2.1
Name: {}
Version: {}
""".format(
name, version
)
Name: {name}
Version: {version}
"""
),
)
z.writestr(record_path, "\n".join(",".join(r) for r in records))

View File

@ -38,7 +38,7 @@ class ConfigurationMixin:
old()
# https://github.com/python/mypy/issues/2427
self.configuration._load_config_files = overridden # type: ignore[assignment]
self.configuration._load_config_files = overridden # type: ignore[method-assign]
@contextlib.contextmanager
def tmpfile(self, contents: str) -> Iterator[str]:

View File

@ -56,7 +56,7 @@ def local_checkout(
assert vcs_backend is not None
vcs_backend.obtain(repo_url_path, url=hide_url(remote_repo), verbosity=0)
return "{}+{}".format(vcs_name, Path(repo_url_path).as_uri())
return f"{vcs_name}+{Path(repo_url_path).as_uri()}"
def local_repo(remote_repo: str, temp_path: Path) -> str:

View File

@ -152,7 +152,7 @@ def html5_page(text: str) -> str:
def package_page(spec: Dict[str, str]) -> "WSGIApplication":
def link(name: str, value: str) -> str:
return '<a href="{}">{}</a>'.format(value, name)
return f'<a href="{value}">{name}</a>'
links = "".join(link(*kv) for kv in spec.items())
return text_html_response(html5_page(links))

View File

@ -107,8 +107,8 @@ class TestPipTestEnvironment:
"""
command = (
"import logging; logging.basicConfig(level='INFO'); "
"logging.getLogger().info('sub: {}', 'foo')"
).format(sub_string)
f"logging.getLogger().info('sub: {sub_string}', 'foo')"
)
args = [sys.executable, "-c", command]
script.run(*args, **kwargs)

View File

@ -19,12 +19,12 @@ from tests.lib.wheel import (
def test_message_from_dict_one_value() -> None:
message = message_from_dict({"a": "1"})
assert set(message.get_all("a")) == {"1"}
assert set(message.get_all("a")) == {"1"} # type: ignore
def test_message_from_dict_multiple_values() -> None:
message = message_from_dict({"a": ["1", "2"]})
assert set(message.get_all("a")) == {"1", "2"}
assert set(message.get_all("a")) == {"1", "2"} # type: ignore
def message_from_bytes(contents: bytes) -> Message:
@ -67,7 +67,7 @@ def test_make_metadata_file_custom_value_list() -> None:
f = default_make_metadata(updates={"a": ["1", "2"]})
assert f is not None
message = default_metadata_checks(f)
assert set(message.get_all("a")) == {"1", "2"}
assert set(message.get_all("a")) == {"1", "2"} # type: ignore
def test_make_metadata_file_custom_value_overrides() -> None:
@ -101,7 +101,7 @@ def default_wheel_metadata_checks(f: File) -> Message:
assert message.get_all("Wheel-Version") == ["1.0"]
assert message.get_all("Generator") == ["pip-test-suite"]
assert message.get_all("Root-Is-Purelib") == ["true"]
assert set(message.get_all("Tag")) == {"py2-none-any", "py3-none-any"}
assert set(message.get_all("Tag")) == {"py2-none-any", "py3-none-any"} # type: ignore
return message
@ -122,7 +122,7 @@ def test_make_wheel_metadata_file_custom_value_list() -> None:
f = default_make_wheel_metadata(updates={"a": ["1", "2"]})
assert f is not None
message = default_wheel_metadata_checks(f)
assert set(message.get_all("a")) == {"1", "2"}
assert set(message.get_all("a")) == {"1", "2"} # type: ignore
def test_make_wheel_metadata_file_custom_value_override() -> None:

View File

@ -190,7 +190,7 @@ def urlsafe_b64encode_nopad(data: bytes) -> str:
def digest(contents: bytes) -> str:
return "sha256={}".format(urlsafe_b64encode_nopad(sha256(contents).digest()))
return f"sha256={urlsafe_b64encode_nopad(sha256(contents).digest())}"
def record_file_maker_wrapper(

View File

@ -23,7 +23,7 @@ def test_dist_get_direct_url_no_metadata(mock_read_text: mock.Mock) -> None:
class FakeDistribution(BaseDistribution):
pass
dist = FakeDistribution()
dist = FakeDistribution() # type: ignore
assert dist.direct_url is None
mock_read_text.assert_called_once_with(DIRECT_URL_METADATA_NAME)
@ -35,7 +35,7 @@ def test_dist_get_direct_url_invalid_json(
class FakeDistribution(BaseDistribution):
canonical_name = cast(NormalizedName, "whatever") # Needed for error logging.
dist = FakeDistribution()
dist = FakeDistribution() # type: ignore
with caplog.at_level(logging.WARNING):
assert dist.direct_url is None
@ -84,7 +84,7 @@ def test_dist_get_direct_url_valid_metadata(mock_read_text: mock.Mock) -> None:
class FakeDistribution(BaseDistribution):
pass
dist = FakeDistribution()
dist = FakeDistribution() # type: ignore
direct_url = dist.direct_url
assert direct_url is not None
mock_read_text.assert_called_once_with(DIRECT_URL_METADATA_NAME)

View File

@ -102,6 +102,7 @@ def test_wheel_metadata_works() -> None:
metadata=InMemoryMetadata({"METADATA": metadata.as_bytes()}, "<in-memory>"),
project_name=name,
),
concrete=False,
)
assert name == dist.canonical_name == dist.raw_name

View File

@ -151,7 +151,7 @@ def test_base_command_provides_tempdir_helpers() -> None:
c = Command("fake", "fake")
# https://github.com/python/mypy/issues/2427
c.run = Mock(side_effect=assert_helpers_set) # type: ignore[assignment]
c.run = Mock(side_effect=assert_helpers_set) # type: ignore[method-assign]
assert c.main(["fake"]) == SUCCESS
c.run.assert_called_once()
@ -176,7 +176,7 @@ def test_base_command_global_tempdir_cleanup(kind: str, exists: bool) -> None:
c = Command("fake", "fake")
# https://github.com/python/mypy/issues/2427
c.run = Mock(side_effect=create_temp_dirs) # type: ignore[assignment]
c.run = Mock(side_effect=create_temp_dirs) # type: ignore[method-assign]
assert c.main(["fake"]) == SUCCESS
c.run.assert_called_once()
assert os.path.exists(Holder.value) == exists
@@ -200,6 +200,6 @@ def test_base_command_local_tempdir_cleanup(kind: str, exists: bool) -> None:
c = Command("fake", "fake")
# https://github.com/python/mypy/issues/2427
c.run = Mock(side_effect=create_temp_dirs) # type: ignore[assignment]
c.run = Mock(side_effect=create_temp_dirs) # type: ignore[method-assign]
assert c.main(["fake"]) == SUCCESS
c.run.assert_called_once()
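These hunks track a mypy change: newer releases split assignments to methods out of the general `[assignment]` error code into a dedicated `[method-assign]` code, so the old ignores no longer matched. A minimal sketch of the pattern being silenced (mypy flags the assignment because the declared method type and the `Mock` type differ; see python/mypy#2427):

```python
from unittest.mock import Mock

class Command:
    def run(self) -> int:
        return 0

c = Command()
# mypy: "Cannot assign to a method" [method-assign]
c.run = Mock(return_value=0)  # type: ignore[method-assign]
assert c.run() == 0
```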

View File

@@ -1,13 +1,32 @@
import os
from pathlib import Path
import pytest
from pip._vendor.packaging.tags import Tag, interpreter_name, interpreter_version
from pip._internal.cache import WheelCache, _hash_dict
from pip._internal.cache import WheelCache, _contains_egg_info, _hash_dict
from pip._internal.models.link import Link
from pip._internal.utils.misc import ensure_dir
@pytest.mark.parametrize(
"s, expected",
[
# Trivial.
("pip-18.0", True),
# Ambiguous.
("foo-2-2", True),
("im-valid", True),
# Invalid.
("invalid", False),
("im_invalid", False),
],
)
def test_contains_egg_info(s: str, expected: bool) -> None:
result = _contains_egg_info(s)
assert result == expected
def test_falsey_path_none() -> None:
wc = WheelCache("")
assert wc.cache_dir is None
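These parametrized cases moved here along with `_contains_egg_info` itself (see the `wheel_builder` diff further down). The heuristic is simply "does the string contain a hyphen-separated name-version pair"; a sketch consistent with the cases above, though pip's actual pattern may differ in detail:

```python
import re

# Matches a "name-version"-shaped substring, case-insensitively.
_egg_info_re = re.compile(r"([a-z0-9_.]+)-([a-z0-9_.!+-]+)", re.IGNORECASE)

def contains_egg_info(s: str) -> bool:
    return bool(_egg_info_re.search(s))

assert contains_egg_info("pip-18.0")
assert contains_egg_info("foo-2-2")
assert not contains_egg_info("im_invalid")  # underscore is not a hyphen
```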

View File

@@ -119,8 +119,8 @@ def test_get_index_content_invalid_content_type_archive(
assert (
"pip._internal.index.collector",
logging.WARNING,
"Skipping page {} because it looks like an archive, and cannot "
"be checked by a HTTP HEAD request.".format(url),
f"Skipping page {url} because it looks like an archive, and cannot "
"be checked by a HTTP HEAD request.",
) in caplog.record_tuples
@@ -417,8 +417,8 @@ def _test_parse_links_data_attribute(
html = (
"<!DOCTYPE html>"
'<html><head><meta charset="utf-8"><head>'
"<body>{}</body></html>"
).format(anchor_html)
f"<body>{anchor_html}</body></html>"
)
html_bytes = html.encode("utf-8")
page = IndexContent(
html_bytes,
@@ -764,8 +764,8 @@ def test_get_index_content_invalid_scheme(
(
"pip._internal.index.collector",
logging.WARNING,
"Cannot look at {} URL {} because it does not support "
"lookup as web pages.".format(vcs_scheme, url),
f"Cannot look at {vcs_scheme} URL {url} because it does not support "
"lookup as web pages.",
),
]

View File

@@ -215,7 +215,7 @@ class TestConfigurationModification(ConfigurationMixin):
# Mock out the method
mymock = MagicMock(spec=self.configuration._mark_as_modified)
# https://github.com/python/mypy/issues/2427
self.configuration._mark_as_modified = mymock # type: ignore[assignment]
self.configuration._mark_as_modified = mymock # type: ignore[method-assign]
self.configuration.set_value("test.hello", "10")
@@ -231,7 +231,7 @@ class TestConfigurationModification(ConfigurationMixin):
# Mock out the method
mymock = MagicMock(spec=self.configuration._mark_as_modified)
# https://github.com/python/mypy/issues/2427
self.configuration._mark_as_modified = mymock # type: ignore[assignment]
self.configuration._mark_as_modified = mymock # type: ignore[method-assign]
self.configuration.set_value("test.hello", "10")
@@ -250,7 +250,7 @@ class TestConfigurationModification(ConfigurationMixin):
# Mock out the method
mymock = MagicMock(spec=self.configuration._mark_as_modified)
# https://github.com/python/mypy/issues/2427
self.configuration._mark_as_modified = mymock # type: ignore[assignment]
self.configuration._mark_as_modified = mymock # type: ignore[method-assign]
self.configuration.set_value("test.hello", "10")

View File

@@ -143,10 +143,7 @@ class TestLink:
def test_is_hash_allowed(
self, hash_name: str, hex_digest: str, expected: bool
) -> None:
url = "https://example.com/wheel.whl#{hash_name}={hex_digest}".format(
hash_name=hash_name,
hex_digest=hex_digest,
)
url = f"https://example.com/wheel.whl#{hash_name}={hex_digest}"
link = Link(url)
hashes_data = {
"sha512": [128 * "a", 128 * "b"],

View File

@@ -21,8 +21,8 @@ def test_raise_for_status_raises_exception(status_code: int, error_type: str) ->
with pytest.raises(NetworkConnectionError) as excinfo:
raise_for_status(resp)
assert str(excinfo.value) == (
"{} {}: Network Error for url:"
" http://www.example.com/whatever.tgz".format(status_code, error_type)
f"{status_code} {error_type}: Network Error for url:"
" http://www.example.com/whatever.tgz"
)

View File

@@ -154,6 +154,7 @@ class TestRequirementSet:
os.fspath(data.packages.joinpath("LocalEnvironMarker")),
)
req.user_supplied = True
req.allow_concrete_dist_overwrite = True
reqset.add_unnamed_requirement(req)
finder = make_test_finder(find_links=[data.find_links])
with self._basic_resolver(finder) as resolver:
@@ -235,8 +236,8 @@ class TestRequirementSet:
r"file \(line 1\)\)\n"
r"Can't verify hashes for these file:// requirements because "
r"they point to directories:\n"
r" file://.*{sep}data{sep}packages{sep}FSPkg "
r"\(from -r file \(line 2\)\)".format(sep=sep)
rf" file://.*{sep}data{sep}packages{sep}FSPkg "
r"\(from -r file \(line 2\)\)"
),
):
resolver.resolve(reqset.all_requirements, True)
@@ -503,6 +504,7 @@ class TestRequirementSet:
with self._basic_resolver(finder) as resolver:
ireq_url = data.packages.joinpath("FSPkg").as_uri()
ireq = get_processed_req_from_line(f"-e {ireq_url}#egg=FSPkg")
ireq.allow_concrete_dist_overwrite = True
reqset = resolver.resolve([ireq], True)
assert len(reqset.all_requirements) == 1
req = reqset.all_requirements[0]

View File

@@ -297,7 +297,7 @@ class TestProcessLine:
def test_yield_line_constraint(self, line_processor: LineProcessor) -> None:
line = "SomeProject"
filename = "filename"
comes_from = "-c {} (line {})".format(filename, 1)
comes_from = f"-c {filename} (line {1})"
req = install_req_from_line(line, comes_from=comes_from, constraint=True)
found_req = line_processor(line, filename, 1, constraint=True)[0]
assert repr(found_req) == repr(req)
@@ -326,7 +326,7 @@ class TestProcessLine:
url = "git+https://url#egg=SomeProject"
line = f"-e {url}"
filename = "filename"
comes_from = "-c {} (line {})".format(filename, 1)
comes_from = f"-c {filename} (line {1})"
req = install_req_from_editable(url, comes_from=comes_from, constraint=True)
found_req = line_processor(line, filename, 1, constraint=True)[0]
assert repr(found_req) == repr(req)
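One subtlety in this conversion: `{1}` inside an f-string is not a positional index as it would be in `str.format`; it is the integer literal 1 evaluated as an expression, so the output is unchanged:

```python
filename = "filename"
comes_from = f"-c {filename} (line {1})"
assert comes_from == "-c filename (line 1)"
```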
@@ -873,12 +873,10 @@ class TestParseRequirements:
) -> None:
global_option = "--dry-run"
content = """
content = f"""
--only-binary :all:
INITools==2.0 --global-option="{global_option}"
""".format(
global_option=global_option
)
"""
with requirements_file(content, tmpdir) as reqs_file:
req = next(

View File

@@ -252,7 +252,7 @@ class TestCheckDistRequiresPython:
def metadata(self) -> email.message.Message:
raise FileNotFoundError(metadata_name)
dist = make_fake_dist(klass=NotWorkingFakeDist)
dist = make_fake_dist(klass=NotWorkingFakeDist) # type: ignore
with pytest.raises(NoneMetadataError) as exc:
_check_dist_requires_python(
@@ -261,8 +261,8 @@
ignore_requires_python=False,
)
assert str(exc.value) == (
"None {} metadata found for distribution: "
"<distribution 'my-project'>".format(metadata_name)
f"None {metadata_name} metadata found for distribution: "
"<distribution 'my-project'>"
)

View File

@@ -102,15 +102,13 @@ def test_get_legacy_build_wheel_path__multiple_names(
],
)
def test_get_entrypoints(tmp_path: pathlib.Path, console_scripts: str) -> None:
entry_points_text = """
entry_points_text = f"""
[console_scripts]
{}
{console_scripts}
[section]
common:one = module:func
common:two = module:other_func
""".format(
console_scripts
)
"""
distribution = make_wheel(
"simple",

View File

@@ -5,7 +5,7 @@ from typing import Optional, cast
import pytest
from pip._internal import wheel_builder
from pip._internal import cache, wheel_builder
from pip._internal.models.link import Link
from pip._internal.operations.build.wheel_legacy import format_command_result
from pip._internal.req.req_install import InstallRequirement
@@ -13,24 +13,6 @@ from pip._internal.vcs.git import Git
from tests.lib import _create_test_package
@pytest.mark.parametrize(
"s, expected",
[
# Trivial.
("pip-18.0", True),
# Ambiguous.
("foo-2-2", True),
("im-valid", True),
# Invalid.
("invalid", False),
("im_invalid", False),
],
)
def test_contains_egg_info(s: str, expected: bool) -> None:
result = wheel_builder._contains_egg_info(s)
assert result == expected
class ReqMock:
def __init__(
self,
@@ -128,7 +110,7 @@ def test_should_build_for_wheel_command(req: ReqMock, expected: bool) -> None:
],
)
def test_should_cache(req: ReqMock, expected: bool) -> None:
assert wheel_builder._should_cache(cast(InstallRequirement, req)) is expected
assert cache.should_cache(cast(InstallRequirement, req)) is expected
def test_should_cache_git_sha(tmpdir: Path) -> None:
@@ -138,12 +120,12 @@ def test_should_cache_git_sha(tmpdir: Path) -> None:
# a link referencing a sha should be cached
url = "git+https://g.c/o/r@" + commit + "#egg=mypkg"
req = ReqMock(link=Link(url), source_dir=repo_path)
assert wheel_builder._should_cache(cast(InstallRequirement, req))
assert cache.should_cache(cast(InstallRequirement, req))
# a link not referencing a sha should not be cached
url = "git+https://g.c/o/r@master#egg=mypkg"
req = ReqMock(link=Link(url), source_dir=repo_path)
assert not wheel_builder._should_cache(cast(InstallRequirement, req))
assert not cache.should_cache(cast(InstallRequirement, req))
def test_format_command_result__INFO(caplog: pytest.LogCaptureFixture) -> None:
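The rule these tests pin down after the move from `wheel_builder._should_cache` to `cache.should_cache`: a VCS requirement is only worth caching when its link pins an immutable revision. A rough sketch of the commit-SHA check, assuming the revision has already been split out of the URL:

```python
import re

def is_immutable_rev(rev: str) -> bool:
    # A full 40-hex-digit git SHA identifies an immutable snapshot;
    # branch names like "master" can move, so they are not cacheable.
    return bool(re.fullmatch(r"[0-9a-f]{40}", rev))

assert is_immutable_rev("a" * 40)
assert not is_immutable_rev("master")
```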

tools/__init__.py Normal file
View File

View File

@@ -27,7 +27,7 @@ def is_this_a_good_version_number(string: str) -> Optional[str]:
expected_major = datetime.now().year % 100
if len(release) not in [2, 3]:
return "Not of the form: {0}.N or {0}.N.P".format(expected_major)
return f"Not of the form: {expected_major}.N or {expected_major}.N.P"
return None
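For context: pip is calendar-versioned, so the check above expects the major component to be the current two-digit year and the release to have two or three components. A minimal sketch of the shape check, assuming `release` is the parsed version tuple:

```python
from datetime import datetime
from typing import Optional, Tuple

def check_release_shape(release: Tuple[int, ...]) -> Optional[str]:
    expected_major = datetime.now().year % 100
    if len(release) not in (2, 3):
        return f"Not of the form: {expected_major}.N or {expected_major}.N.P"
    return None

assert check_release_shape((23, 2)) is None
assert check_release_shape((23,)) is not None
```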