Enforce f-strings via Ruff (#12393)

Authored by Damian Shaw on 2023-11-07 04:14:56 -05:00, committed by GitHub
parent 9685f64fe8
commit 68529081c2
62 changed files with 201 additions and 334 deletions


@ -194,22 +194,17 @@ class PipReqFileOptionsReference(PipOptions):
opt = option()
opt_name = opt._long_opts[0]
if opt._short_opts:
short_opt_name = "{}, ".format(opt._short_opts[0])
short_opt_name = f"{opt._short_opts[0]}, "
else:
short_opt_name = ""
if option in cmdoptions.general_group["options"]:
prefix = ""
else:
prefix = "{}_".format(self.determine_opt_prefix(opt_name))
prefix = f"{self.determine_opt_prefix(opt_name)}_"
self.view_list.append(
"* :ref:`{short}{long}<{prefix}{opt_name}>`".format(
short=short_opt_name,
long=opt_name,
prefix=prefix,
opt_name=opt_name,
),
f"* :ref:`{short_opt_name}{opt_name}<{prefix}{opt_name}>`",
"\n",
)

news/12393.trivial.rst Normal file

@ -0,0 +1 @@
Enforce and update code to use f-strings via Ruff rule UP032


@ -102,6 +102,7 @@ select = [
"PLR0",
"W",
"RUF100",
"UP032",
]
[tool.ruff.isort]
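
A quick illustration of the rewrite the newly selected UP032 rule enforces (hypothetical values; the message template is borrowed from one of the hunks below). The rule is auto-fixable, so running ruff check --fix produces the f-string form seen throughout this commit:

value = "3.x"
error_msg = "at most three version parts are allowed"

# Old style, flagged by UP032:
old = "invalid --python-version value: {!r}: {}".format(value, error_msg)

# Equivalent f-string, the form this commit enforces:
new = f"invalid --python-version value: {value!r}: {error_msg}"

assert old == new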


@ -77,7 +77,7 @@ setup(
entry_points={
"console_scripts": [
"pip=pip._internal.cli.main:main",
"pip{}=pip._internal.cli.main:main".format(sys.version_info[0]),
f"pip{sys.version_info[0]}=pip._internal.cli.main:main",
"pip{}.{}=pip._internal.cli.main:main".format(*sys.version_info[:2]),
],
},
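
Note that only the first console-script entry is rewritten here. As far as I can tell, UP032 skips .format() calls that unpack their arguments, since *args has no direct f-string equivalent, which would explain why the pip{}.{} entry is left untouched. A small sketch of the distinction (illustrative only):

import sys

# Convertible: each positional argument maps onto exactly one placeholder.
assert "pip{}".format(sys.version_info[0]) == f"pip{sys.version_info[0]}"

# Not auto-convertible: star-unpacking cannot be inlined into an f-string,
# so the call stays as written (a manual rewrite would need explicit
# indexing, e.g. f"pip{sys.version_info[0]}.{sys.version_info[1]}").
"pip{}.{}=pip._internal.cli.main:main".format(*sys.version_info[:2])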


@ -582,10 +582,7 @@ def _handle_python_version(
"""
version_info, error_msg = _convert_python_version(value)
if error_msg is not None:
msg = "invalid --python-version value: {!r}: {}".format(
value,
error_msg,
)
msg = f"invalid --python-version value: {value!r}: {error_msg}"
raise_option_error(parser, option=option, msg=msg)
parser.values.python_version = version_info
@ -921,9 +918,9 @@ def _handle_merge_hash(
algo, digest = value.split(":", 1)
except ValueError:
parser.error(
"Arguments to {} must be a hash name "
f"Arguments to {opt_str} must be a hash name "
"followed by a value, like --hash=sha256:"
"abcde...".format(opt_str)
"abcde..."
)
if algo not in STRONG_HASHES:
parser.error(


@ -229,9 +229,9 @@ class ConfigOptionParser(CustomOptionParser):
val = strtobool(val)
except ValueError:
self.error(
"{} is not a valid value for {} option, "
f"{val} is not a valid value for {key} option, "
"please specify a boolean value like yes/no, "
"true/false or 1/0 instead.".format(val, key)
"true/false or 1/0 instead."
)
elif option.action == "count":
with suppress(ValueError):
@ -240,10 +240,10 @@ class ConfigOptionParser(CustomOptionParser):
val = int(val)
if not isinstance(val, int) or val < 0:
self.error(
"{} is not a valid value for {} option, "
f"{val} is not a valid value for {key} option, "
"please instead specify either a non-negative integer "
"or a boolean value like yes/no or false/true "
"which is equivalent to 1/0.".format(val, key)
"which is equivalent to 1/0."
)
elif option.action == "append":
val = val.split()


@ -175,7 +175,7 @@ class CacheCommand(Command):
files += self._find_http_files(options)
else:
# Add the pattern to the log message
no_matching_msg += ' for pattern "{}"'.format(args[0])
no_matching_msg += f' for pattern "{args[0]}"'
if not files:
logger.warning(no_matching_msg)


@ -242,17 +242,15 @@ class ConfigurationCommand(Command):
e.filename = editor
raise
except subprocess.CalledProcessError as e:
raise PipError(
"Editor Subprocess exited with exit code {}".format(e.returncode)
)
raise PipError(f"Editor Subprocess exited with exit code {e.returncode}")
def _get_n_args(self, args: List[str], example: str, n: int) -> Any:
"""Helper to make sure the command got the right number of arguments"""
if len(args) != n:
msg = (
"Got unexpected number of arguments, expected {}. "
'(example: "{} config {}")'
).format(n, get_prog(), example)
f"Got unexpected number of arguments, expected {n}. "
f'(example: "{get_prog()} config {example}")'
)
raise PipError(msg)
if n == 1:


@ -95,7 +95,7 @@ def show_actual_vendor_versions(vendor_txt_versions: Dict[str, str]) -> None:
elif parse_version(actual_version) != parse_version(expected_version):
extra_message = (
" (CONFLICT: vendor.txt suggests version should"
" be {})".format(expected_version)
f" be {expected_version})"
)
logger.info("%s==%s%s", module_name, actual_version, extra_message)
@ -120,7 +120,7 @@ def show_tags(options: Values) -> None:
if formatted_target:
suffix = f" (target: {formatted_target})"
msg = "Compatible tags: {}{}".format(len(tags), suffix)
msg = f"Compatible tags: {len(tags)}{suffix}"
logger.info(msg)
if options.verbose < 1 and len(tags) > tag_limit:
@ -134,9 +134,7 @@ def show_tags(options: Values) -> None:
logger.info(str(tag))
if tags_limited:
msg = (
"...\n[First {tag_limit} tags shown. Pass --verbose to show all.]"
).format(tag_limit=tag_limit)
msg = f"...\n[First {tag_limit} tags shown. Pass --verbose to show all.]"
logger.info(msg)


@ -128,12 +128,12 @@ class IndexCommand(IndexGroupCommand):
if not versions:
raise DistributionNotFound(
"No matching distribution found for {}".format(query)
f"No matching distribution found for {query}"
)
formatted_versions = [str(ver) for ver in sorted(versions, reverse=True)]
latest = formatted_versions[0]
write_output("{} ({})".format(query, latest))
write_output(f"{query} ({latest})")
write_output("Available versions: {}".format(", ".join(formatted_versions)))
print_dist_installation_info(query, latest)


@ -607,12 +607,8 @@ class InstallCommand(RequirementCommand):
version = package_set[project_name][0]
for dependency in missing[project_name]:
message = (
"{name} {version} requires {requirement}, "
f"{project_name} {version} requires {dependency[1]}, "
"which is not installed."
).format(
name=project_name,
version=version,
requirement=dependency[1],
)
parts.append(message)


@ -59,8 +59,8 @@ def _disassemble_key(name: str) -> List[str]:
if "." not in name:
error_message = (
"Key does not contain dot separated section and key. "
"Perhaps you wanted to use 'global.{}' instead?"
).format(name)
f"Perhaps you wanted to use 'global.{name}' instead?"
)
raise ConfigurationError(error_message)
return name.split(".", 1)


@ -247,10 +247,7 @@ class NoneMetadataError(PipError):
def __str__(self) -> str:
# Use `dist` in the error message because its stringification
# includes more information, like the version and location.
return "None {} metadata found for distribution: {}".format(
self.metadata_name,
self.dist,
)
return f"None {self.metadata_name} metadata found for distribution: {self.dist}"
class UserInstallationInvalid(InstallationError):
@ -594,7 +591,7 @@ class HashMismatch(HashError):
self.gots = gots
def body(self) -> str:
return " {}:\n{}".format(self._requirement_name(), self._hash_comparison())
return f" {self._requirement_name()}:\n{self._hash_comparison()}"
def _hash_comparison(self) -> str:
"""
@ -616,11 +613,9 @@ class HashMismatch(HashError):
lines: List[str] = []
for hash_name, expecteds in self.allowed.items():
prefix = hash_then_or(hash_name)
lines.extend(
(" Expected {} {}".format(next(prefix), e)) for e in expecteds
)
lines.extend((f" Expected {next(prefix)} {e}") for e in expecteds)
lines.append(
" Got {}\n".format(self.gots[hash_name].hexdigest())
f" Got {self.gots[hash_name].hexdigest()}\n"
)
return "\n".join(lines)


@ -533,8 +533,8 @@ class CandidateEvaluator:
)
except ValueError:
raise UnsupportedWheel(
"{} is not a supported wheel for this platform. It "
"can't be sorted.".format(wheel.filename)
f"{wheel.filename} is not a supported wheel for this platform. It "
"can't be sorted."
)
if self._prefer_binary:
binary_preference = 1
@ -939,9 +939,7 @@ class PackageFinder:
_format_versions(best_candidate_result.iter_all()),
)
raise DistributionNotFound(
"No matching distribution found for {}".format(req)
)
raise DistributionNotFound(f"No matching distribution found for {req}")
def _should_install_candidate(
candidate: Optional[InstallationCandidate],


@ -27,8 +27,4 @@ class InstallationCandidate(KeyBasedCompareMixin):
)
def __str__(self) -> str:
return "{!r} candidate (version {} at {})".format(
self.name,
self.version,
self.link,
)
return f"{self.name!r} candidate (version {self.version} at {self.link})"


@ -31,9 +31,7 @@ def _get(
value = d[key]
if not isinstance(value, expected_type):
raise DirectUrlValidationError(
"{!r} has unexpected type for {} (expected {})".format(
value, key, expected_type
)
f"{value!r} has unexpected type for {key} (expected {expected_type})"
)
return value


@ -33,9 +33,7 @@ class FormatControl:
return all(getattr(self, k) == getattr(other, k) for k in self.__slots__)
def __repr__(self) -> str:
return "{}({}, {})".format(
self.__class__.__name__, self.no_binary, self.only_binary
)
return f"{self.__class__.__name__}({self.no_binary}, {self.only_binary})"
@staticmethod
def handle_mutual_excludes(value: str, target: Set[str], other: Set[str]) -> None:


@ -368,9 +368,7 @@ class Link(KeyBasedCompareMixin):
else:
rp = ""
if self.comes_from:
return "{} (from {}){}".format(
redact_auth_from_url(self._url), self.comes_from, rp
)
return f"{redact_auth_from_url(self._url)} (from {self.comes_from}){rp}"
else:
return redact_auth_from_url(str(self._url))


@ -42,7 +42,7 @@ def _prepare_download(
logged_url = redact_auth_from_url(url)
if total_length:
logged_url = "{} ({})".format(logged_url, format_size(total_length))
logged_url = f"{logged_url} ({format_size(total_length)})"
if is_from_cache(resp):
logger.info("Using cached %s", logged_url)


@ -164,16 +164,14 @@ def message_about_scripts_not_on_PATH(scripts: Sequence[str]) -> Optional[str]:
for parent_dir, dir_scripts in warn_for.items():
sorted_scripts: List[str] = sorted(dir_scripts)
if len(sorted_scripts) == 1:
start_text = "script {} is".format(sorted_scripts[0])
start_text = f"script {sorted_scripts[0]} is"
else:
start_text = "scripts {} are".format(
", ".join(sorted_scripts[:-1]) + " and " + sorted_scripts[-1]
)
msg_lines.append(
"The {} installed in '{}' which is not on PATH.".format(
start_text, parent_dir
)
f"The {start_text} installed in '{parent_dir}' which is not on PATH."
)
last_line_fmt = (
@ -321,9 +319,7 @@ def get_console_script_specs(console: Dict[str, str]) -> List[str]:
scripts_to_generate.append("pip = " + pip_script)
if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
scripts_to_generate.append(
"pip{} = {}".format(sys.version_info[0], pip_script)
)
scripts_to_generate.append(f"pip{sys.version_info[0]} = {pip_script}")
scripts_to_generate.append(f"pip{get_major_minor_version()} = {pip_script}")
# Delete any other versioned pip entry points
@ -336,9 +332,7 @@ def get_console_script_specs(console: Dict[str, str]) -> List[str]:
scripts_to_generate.append("easy_install = " + easy_install_script)
scripts_to_generate.append(
"easy_install-{} = {}".format(
get_major_minor_version(), easy_install_script
)
f"easy_install-{get_major_minor_version()} = {easy_install_script}"
)
# Delete any other versioned easy_install entry points
easy_install_ep = [
@ -408,10 +402,10 @@ class ScriptFile:
class MissingCallableSuffix(InstallationError):
def __init__(self, entry_point: str) -> None:
super().__init__(
"Invalid script entry point: {} - A callable "
f"Invalid script entry point: {entry_point} - A callable "
"suffix is required. Cf https://packaging.python.org/"
"specifications/entry-points/#use-for-scripts for more "
"information.".format(entry_point)
"information."
)
@ -712,7 +706,7 @@ def req_error_context(req_description: str) -> Generator[None, None, None]:
try:
yield
except InstallationError as e:
message = "For req: {}. {}".format(req_description, e.args[0])
message = f"For req: {req_description}. {e.args[0]}"
raise InstallationError(message) from e


@ -603,8 +603,8 @@ class RequirementPreparer:
)
except NetworkConnectionError as exc:
raise InstallationError(
"Could not install requirement {} because of HTTP "
"error {} for URL {}".format(req, exc, link)
f"Could not install requirement {req} because of HTTP "
f"error {exc} for URL {link}"
)
else:
file_path = self._downloaded[link.url]
@ -684,9 +684,9 @@ class RequirementPreparer:
with indent_log():
if self.require_hashes:
raise InstallationError(
"The editable requirement {} cannot be installed when "
f"The editable requirement {req} cannot be installed when "
"requiring hashes, because there is no single file to "
"hash.".format(req)
"hash."
)
req.ensure_has_source_dir(self.src_dir)
req.update_editable()
@ -714,7 +714,7 @@ class RequirementPreparer:
assert req.satisfied_by, "req should have been satisfied but isn't"
assert skip_reason is not None, (
"did not get skip reason skipped but req.satisfied_by "
"is set to {}".format(req.satisfied_by)
f"is set to {req.satisfied_by}"
)
logger.info(
"Requirement %s: %s (%s)", skip_reason, req, req.satisfied_by.version


@ -462,7 +462,7 @@ def install_req_from_req_string(
raise InstallationError(
"Packages installed from PyPI cannot depend on packages "
"which are not also hosted on PyPI.\n"
"{} depends on {} ".format(comes_from.name, req)
f"{comes_from.name} depends on {req} "
)
return InstallRequirement(


@ -191,7 +191,7 @@ class InstallRequirement:
if self.req:
s = redact_auth_from_requirement(self.req)
if self.link:
s += " from {}".format(redact_auth_from_url(self.link.url))
s += f" from {redact_auth_from_url(self.link.url)}"
elif self.link:
s = redact_auth_from_url(self.link.url)
else:
@ -221,7 +221,7 @@ class InstallRequirement:
attributes = vars(self)
names = sorted(attributes)
state = ("{}={!r}".format(attr, attributes[attr]) for attr in sorted(names))
state = (f"{attr}={attributes[attr]!r}" for attr in sorted(names))
return "<{name} object: {{{state}}}>".format(
name=self.__class__.__name__,
state=", ".join(state),
@ -754,8 +754,8 @@ class InstallRequirement:
if os.path.exists(archive_path):
response = ask_path_exists(
"The file {} exists. (i)gnore, (w)ipe, "
"(b)ackup, (a)bort ".format(display_path(archive_path)),
f"The file {display_path(archive_path)} exists. (i)gnore, (w)ipe, "
"(b)ackup, (a)bort ",
("i", "w", "b", "a"),
)
if response == "i":


@ -71,16 +71,16 @@ def uninstallation_paths(dist: BaseDistribution) -> Generator[str, None, None]:
entries = dist.iter_declared_entries()
if entries is None:
msg = "Cannot uninstall {dist}, RECORD file not found.".format(dist=dist)
msg = f"Cannot uninstall {dist}, RECORD file not found."
installer = dist.installer
if not installer or installer == "pip":
dep = "{}=={}".format(dist.raw_name, dist.version)
dep = f"{dist.raw_name}=={dist.version}"
msg += (
" You might be able to recover from this via: "
"'pip install --force-reinstall --no-deps {}'.".format(dep)
f"'pip install --force-reinstall --no-deps {dep}'."
)
else:
msg += " Hint: The package was installed by {}.".format(installer)
msg += f" Hint: The package was installed by {installer}."
raise UninstallationError(msg)
for entry in entries:


@ -231,9 +231,7 @@ class Resolver(BaseResolver):
tags = compatibility_tags.get_supported()
if requirement_set.check_supported_wheels and not wheel.supported(tags):
raise InstallationError(
"{} is not a supported wheel on this platform.".format(
wheel.filename
)
f"{wheel.filename} is not a supported wheel on this platform."
)
# This next bit is really a sanity check.
@ -287,9 +285,9 @@ class Resolver(BaseResolver):
)
if does_not_satisfy_constraint:
raise InstallationError(
"Could not satisfy constraints for '{}': "
f"Could not satisfy constraints for '{install_req.name}': "
"installation from path or url cannot be "
"constrained to a version".format(install_req.name)
"constrained to a version"
)
# If we're now installing a constraint, mark the existing
# object for real installation.
@ -398,9 +396,9 @@ class Resolver(BaseResolver):
# "UnicodeEncodeError: 'ascii' codec can't encode character"
# in Python 2 when the reason contains non-ascii characters.
"The candidate selected for download or install is a "
"yanked version: {candidate}\n"
"Reason for being yanked: {reason}"
).format(candidate=best_candidate, reason=reason)
f"yanked version: {best_candidate}\n"
f"Reason for being yanked: {reason}"
)
logger.warning(msg)
return link


@ -159,10 +159,7 @@ class _InstallRequirementBackedCandidate(Candidate):
return f"{self.name} {self.version}"
def __repr__(self) -> str:
return "{class_name}({link!r})".format(
class_name=self.__class__.__name__,
link=str(self._link),
)
return f"{self.__class__.__name__}({str(self._link)!r})"
def __hash__(self) -> int:
return hash((self.__class__, self._link))
@ -354,10 +351,7 @@ class AlreadyInstalledCandidate(Candidate):
return str(self.dist)
def __repr__(self) -> str:
return "{class_name}({distribution!r})".format(
class_name=self.__class__.__name__,
distribution=self.dist,
)
return f"{self.__class__.__name__}({self.dist!r})"
def __hash__(self) -> int:
return hash((self.__class__, self.name, self.version))
@ -455,11 +449,7 @@ class ExtrasCandidate(Candidate):
return "{}[{}] {}".format(name, ",".join(self.extras), rest)
def __repr__(self) -> str:
return "{class_name}(base={base!r}, extras={extras!r})".format(
class_name=self.__class__.__name__,
base=self.base,
extras=self.extras,
)
return f"{self.__class__.__name__}(base={self.base!r}, extras={self.extras!r})"
def __hash__(self) -> int:
return hash((self.base, self.extras))


@ -753,8 +753,8 @@ class Factory:
info = "the requested packages"
msg = (
"Cannot install {} because these package versions "
"have conflicting dependencies.".format(info)
f"Cannot install {info} because these package versions "
"have conflicting dependencies."
)
logger.critical(msg)
msg = "\nThe conflict is caused by:"


@ -15,10 +15,7 @@ class ExplicitRequirement(Requirement):
return str(self.candidate)
def __repr__(self) -> str:
return "{class_name}({candidate!r})".format(
class_name=self.__class__.__name__,
candidate=self.candidate,
)
return f"{self.__class__.__name__}({self.candidate!r})"
@property
def project_name(self) -> NormalizedName:
@ -50,10 +47,7 @@ class SpecifierRequirement(Requirement):
return str(self._ireq.req)
def __repr__(self) -> str:
return "{class_name}({requirement!r})".format(
class_name=self.__class__.__name__,
requirement=str(self._ireq.req),
)
return f"{self.__class__.__name__}({str(self._ireq.req)!r})"
@property
def project_name(self) -> NormalizedName:
@ -116,10 +110,7 @@ class RequiresPythonRequirement(Requirement):
return f"Python {self.specifier}"
def __repr__(self) -> str:
return "{class_name}({specifier!r})".format(
class_name=self.__class__.__name__,
specifier=str(self.specifier),
)
return f"{self.__class__.__name__}({str(self.specifier)!r})"
@property
def project_name(self) -> NormalizedName:
@ -155,10 +146,7 @@ class UnsatisfiableRequirement(Requirement):
return f"{self._name} (unavailable)"
def __repr__(self) -> str:
return "{class_name}({name!r})".format(
class_name=self.__class__.__name__,
name=str(self._name),
)
return f"{self.__class__.__name__}({str(self._name)!r})"
@property
def project_name(self) -> NormalizedName:


@ -77,11 +77,7 @@ def get_pip_version() -> str:
pip_pkg_dir = os.path.join(os.path.dirname(__file__), "..", "..")
pip_pkg_dir = os.path.abspath(pip_pkg_dir)
return "pip {} from {} (python {})".format(
__version__,
pip_pkg_dir,
get_major_minor_version(),
)
return f"pip {__version__} from {pip_pkg_dir} (python {get_major_minor_version()})"
def normalize_version_info(py_version_info: Tuple[int, ...]) -> Tuple[int, int, int]:
@ -279,13 +275,13 @@ def strtobool(val: str) -> int:
def format_size(bytes: float) -> str:
if bytes > 1000 * 1000:
return "{:.1f} MB".format(bytes / 1000.0 / 1000)
return f"{bytes / 1000.0 / 1000:.1f} MB"
elif bytes > 10 * 1000:
return "{} kB".format(int(bytes / 1000))
return f"{int(bytes / 1000)} kB"
elif bytes > 1000:
return "{:.1f} kB".format(bytes / 1000.0)
return f"{bytes / 1000.0:.1f} kB"
else:
return "{} bytes".format(int(bytes))
return f"{int(bytes)} bytes"
def tabulate(rows: Iterable[Iterable[Any]]) -> Tuple[List[str], List[int]]:
@ -522,9 +518,7 @@ def redact_netloc(netloc: str) -> str:
else:
user = urllib.parse.quote(user)
password = ":****"
return "{user}{password}@{netloc}".format(
user=user, password=password, netloc=netloc
)
return f"{user}{password}@{netloc}"
def _transform_url(
@ -592,7 +586,7 @@ class HiddenText:
self.redacted = redacted
def __repr__(self) -> str:
return "<HiddenText {!r}>".format(str(self))
return f"<HiddenText {str(self)!r}>"
def __str__(self) -> str:
return self.redacted


@ -28,7 +28,7 @@ def parse_wheel(wheel_zip: ZipFile, name: str) -> Tuple[str, Message]:
metadata = wheel_metadata(wheel_zip, info_dir)
version = wheel_version(metadata)
except UnsupportedWheel as e:
raise UnsupportedWheel("{} has an invalid wheel, {}".format(name, str(e)))
raise UnsupportedWheel(f"{name} has an invalid wheel, {str(e)}")
check_compatibility(version, name)
@ -60,9 +60,7 @@ def wheel_dist_info_dir(source: ZipFile, name: str) -> str:
canonical_name = canonicalize_name(name)
if not info_dir_name.startswith(canonical_name):
raise UnsupportedWheel(
".dist-info directory {!r} does not start with {!r}".format(
info_dir, canonical_name
)
f".dist-info directory {info_dir!r} does not start with {canonical_name!r}"
)
return info_dir


@ -405,9 +405,9 @@ class VersionControl:
scheme, netloc, path, query, frag = urllib.parse.urlsplit(url)
if "+" not in scheme:
raise ValueError(
"Sorry, {!r} is a malformed VCS url. "
f"Sorry, {url!r} is a malformed VCS url. "
"The format is <vcs>+<protocol>://<url>, "
"e.g. svn+http://myrepo/svn/MyApp#egg=MyApp".format(url)
"e.g. svn+http://myrepo/svn/MyApp#egg=MyApp"
)
# Remove the vcs prefix.
scheme = scheme.split("+", 1)[1]
@ -417,9 +417,9 @@ class VersionControl:
path, rev = path.rsplit("@", 1)
if not rev:
raise InstallationError(
"The URL {!r} has an empty revision (after @) "
f"The URL {url!r} has an empty revision (after @) "
"which is not supported. Include a revision after @ "
"or remove @ from the URL.".format(url)
"or remove @ from the URL."
)
url = urllib.parse.urlunsplit((scheme, netloc, path, query, ""))
return url, rev, user_pass
@ -566,7 +566,7 @@ class VersionControl:
self.name,
url,
)
response = ask_path_exists("What to do? {}".format(prompt[0]), prompt[1])
response = ask_path_exists(f"What to do? {prompt[0]}", prompt[1])
if response == "a":
sys.exit(-1)


@ -140,15 +140,15 @@ def _verify_one(req: InstallRequirement, wheel_path: str) -> None:
w = Wheel(os.path.basename(wheel_path))
if canonicalize_name(w.name) != canonical_name:
raise InvalidWheelFilename(
"Wheel has unexpected file name: expected {!r}, "
"got {!r}".format(canonical_name, w.name),
f"Wheel has unexpected file name: expected {canonical_name!r}, "
f"got {w.name!r}",
)
dist = get_wheel_distribution(FilesystemWheel(wheel_path), canonical_name)
dist_verstr = str(dist.version)
if canonicalize_version(dist_verstr) != canonicalize_version(w.version):
raise InvalidWheelFilename(
"Wheel has unexpected file name: expected {!r}, "
"got {!r}".format(dist_verstr, w.version),
f"Wheel has unexpected file name: expected {dist_verstr!r}, "
f"got {w.version!r}",
)
metadata_version_value = dist.metadata_version
if metadata_version_value is None:
@ -160,8 +160,7 @@ def _verify_one(req: InstallRequirement, wheel_path: str) -> None:
raise UnsupportedWheel(msg)
if metadata_version >= Version("1.2") and not isinstance(dist.version, Version):
raise UnsupportedWheel(
"Metadata 1.2 mandates PEP 440 version, "
"but {!r} is not".format(dist_verstr)
f"Metadata 1.2 mandates PEP 440 version, but {dist_verstr!r} is not"
)


@ -141,7 +141,7 @@ def pytest_collection_modifyitems(config: Config, items: List[pytest.Function])
if "script" in item.fixturenames:
raise RuntimeError(
"Cannot use the ``script`` funcarg in a unit test: "
"(filename = {}, item = {})".format(module_path, item)
f"(filename = {module_path}, item = {item})"
)
else:
raise RuntimeError(f"Unknown test type (filename = {module_path})")


@ -23,7 +23,7 @@ def test_entrypoints_work(entrypoint: str, script: PipTestEnvironment) -> None:
fake_pkg.mkdir()
fake_pkg.joinpath("setup.py").write_text(
dedent(
"""
f"""
from setuptools import setup
setup(
@ -31,13 +31,11 @@ def test_entrypoints_work(entrypoint: str, script: PipTestEnvironment) -> None:
version="0.1.0",
entry_points={{
"console_scripts": [
{!r}
{entrypoint!r}
]
}}
)
""".format(
entrypoint
)
"""
)
)
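
The doubled braces in the template above are untouched by the conversion: literal { and } must be written as {{ and }} in .format() templates and in f-strings alike, so only the placeholder itself changes. A short sketch with a hypothetical entry point:

entrypoint = "fake = fake_pkg:main"

# Escaped braces render literally in both forms; only {!r} vs {entrypoint!r} differs.
via_format = 'entry_points={{"console_scripts": [{!r}]}}'.format(entrypoint)
via_fstring = f'entry_points={{"console_scripts": [{entrypoint!r}]}}'

assert via_format == via_fstring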


@ -400,7 +400,7 @@ def test_completion_path_after_option(
def test_completion_uses_same_executable_name(
autocomplete_script: PipTestEnvironment, flag: str, deprecated_python: bool
) -> None:
executable_name = "pip{}".format(sys.version_info[0])
executable_name = f"pip{sys.version_info[0]}"
# Deprecated python versions produce an extra deprecation warning
result = autocomplete_script.run(
executable_name,


@ -68,7 +68,7 @@ def test_debug__tags(script: PipTestEnvironment, args: List[str]) -> None:
stdout = result.stdout
tags = compatibility_tags.get_supported()
expected_tag_header = "Compatible tags: {}".format(len(tags))
expected_tag_header = f"Compatible tags: {len(tags)}"
assert expected_tag_header in stdout
show_verbose_note = "--verbose" not in args


@ -166,13 +166,11 @@ def test_freeze_with_invalid_names(script: PipTestEnvironment) -> None:
with open(egg_info_path, "w") as egg_info_file:
egg_info_file.write(
textwrap.dedent(
"""\
f"""\
Metadata-Version: 1.0
Name: {}
Name: {pkgname}
Version: 1.0
""".format(
pkgname
)
"""
)
)
@ -221,12 +219,10 @@ def test_freeze_editable_not_vcs(script: PipTestEnvironment) -> None:
# We need to apply os.path.normcase() to the path since that is what
# the freeze code does.
expected = textwrap.dedent(
"""\
f"""\
...# Editable install with no version control (version-pkg==0.1)
-e {}
...""".format(
os.path.normcase(pkg_path)
)
-e {os.path.normcase(pkg_path)}
..."""
)
_check_output(result.stdout, expected)
@ -248,12 +244,10 @@ def test_freeze_editable_git_with_no_remote(
# We need to apply os.path.normcase() to the path since that is what
# the freeze code does.
expected = textwrap.dedent(
"""\
f"""\
...# Editable Git install with no remote (version-pkg==0.1)
-e {}
...""".format(
os.path.normcase(pkg_path)
)
-e {os.path.normcase(pkg_path)}
..."""
)
_check_output(result.stdout, expected)
@ -653,9 +647,9 @@ def test_freeze_with_requirement_option_file_url_egg_not_installed(
expect_stderr=True,
)
expected_err = (
"WARNING: Requirement file [requirements.txt] contains {}, "
f"WARNING: Requirement file [requirements.txt] contains {url}, "
"but package 'Does.Not-Exist' is not installed\n"
).format(url)
)
if deprecated_python:
assert expected_err in result.stderr
else:


@ -106,10 +106,10 @@ def test_pep518_refuses_conflicting_requires(
assert (
result.returncode != 0
and (
"Some build dependencies for {url} conflict "
f"Some build dependencies for {project_dir.as_uri()} conflict "
"with PEP 517/518 supported "
"requirements: setuptools==1.0 is incompatible with "
"setuptools>=40.8.0.".format(url=project_dir.as_uri())
"setuptools>=40.8.0."
)
in result.stderr
), str(result)
@ -595,8 +595,8 @@ def test_hashed_install_success(
with requirements_file(
"simple2==1.0 --hash=sha256:9336af72ca661e6336eb87bc7de3e8844d853e"
"3848c2b9bbd2e8bf01db88c2c7\n"
"{simple} --hash=sha256:393043e672415891885c9a2a0929b1af95fb866d6c"
"a016b42d2e6ce53619b653".format(simple=file_url),
f"{file_url} --hash=sha256:393043e672415891885c9a2a0929b1af95fb866d6c"
"a016b42d2e6ce53619b653",
tmpdir,
) as reqs_file:
script.pip_install_local("-r", reqs_file.resolve())
@ -1735,7 +1735,7 @@ def test_install_builds_wheels(script: PipTestEnvironment, data: TestData) -> No
# into the cache
assert wheels != [], str(res)
assert wheels == [
"Upper-2.0-py{}-none-any.whl".format(sys.version_info[0]),
f"Upper-2.0-py{sys.version_info[0]}-none-any.whl",
]
@ -2387,7 +2387,7 @@ def test_install_verify_package_name_normalization(
assert "Successfully installed simple-package" in result.stdout
result = script.pip("install", package_name)
assert "Requirement already satisfied: {}".format(package_name) in result.stdout
assert f"Requirement already satisfied: {package_name}" in result.stdout
def test_install_logs_pip_version_in_debug(


@ -184,12 +184,10 @@ def test_config_file_override_stack(
config_file.write_text(
textwrap.dedent(
"""\
f"""\
[global]
index-url = {}/simple1
""".format(
base_address
)
index-url = {base_address}/simple1
"""
)
)
script.pip("install", "-vvv", "INITools", expect_error=True)
@ -197,14 +195,12 @@ def test_config_file_override_stack(
config_file.write_text(
textwrap.dedent(
"""\
f"""\
[global]
index-url = {address}/simple1
index-url = {base_address}/simple1
[install]
index-url = {address}/simple2
""".format(
address=base_address
)
index-url = {base_address}/simple2
"""
)
)
script.pip("install", "-vvv", "INITools", expect_error=True)


@ -41,13 +41,11 @@ def test_find_links_requirements_file_relative_path(
"""Test find-links as a relative path to a reqs file."""
script.scratch_path.joinpath("test-req.txt").write_text(
textwrap.dedent(
"""
f"""
--no-index
--find-links={}
--find-links={data.packages.as_posix()}
parent==0.1
""".format(
data.packages.as_posix()
)
"""
)
)
result = script.pip(


@ -95,7 +95,7 @@ def test_requirements_file(script: PipTestEnvironment) -> None:
result.did_create(script.site_packages / "INITools-0.2.dist-info")
result.did_create(script.site_packages / "initools")
assert result.files_created[script.site_packages / other_lib_name].dir
fn = "{}-{}.dist-info".format(other_lib_name, other_lib_version)
fn = f"{other_lib_name}-{other_lib_version}.dist-info"
assert result.files_created[script.site_packages / fn].dir
@ -260,13 +260,13 @@ def test_respect_order_in_requirements_file(
assert (
"parent" in downloaded[0]
), 'First download should be "parent" but was "{}"'.format(downloaded[0])
), f'First download should be "parent" but was "{downloaded[0]}"'
assert (
"child" in downloaded[1]
), 'Second download should be "child" but was "{}"'.format(downloaded[1])
), f'Second download should be "child" but was "{downloaded[1]}"'
assert (
"simple" in downloaded[2]
), 'Third download should be "simple" but was "{}"'.format(downloaded[2])
), f'Third download should be "simple" but was "{downloaded[2]}"'
def test_install_local_editable_with_extras(


@ -169,9 +169,9 @@ def get_header_scheme_path_for_script(
) -> Path:
command = (
"from pip._internal.locations import get_scheme;"
"scheme = get_scheme({!r});"
f"scheme = get_scheme({dist_name!r});"
"print(scheme.headers);"
).format(dist_name)
)
result = script.run("python", "-c", command).stdout
return Path(result.strip())


@ -1185,7 +1185,7 @@ def test_new_resolver_presents_messages_when_backtracking_a_lot(
for index in range(1, N + 1):
A_version = f"{index}.0.0"
B_version = f"{index}.0.0"
C_version = "{index_minus_one}.0.0".format(index_minus_one=index - 1)
C_version = f"{index - 1}.0.0"
depends = ["B == " + B_version]
if index != 1:


@ -71,8 +71,8 @@ def test_new_resolver_conflict_constraints_file(
def test_new_resolver_requires_python_error(script: PipTestEnvironment) -> None:
compatible_python = ">={0.major}.{0.minor}".format(sys.version_info)
incompatible_python = "<{0.major}.{0.minor}".format(sys.version_info)
compatible_python = f">={sys.version_info.major}.{sys.version_info.minor}"
incompatible_python = f"<{sys.version_info.major}.{sys.version_info.minor}"
pkga = create_test_package_with_setup(
script,
@ -99,7 +99,7 @@ def test_new_resolver_requires_python_error(script: PipTestEnvironment) -> None:
def test_new_resolver_checks_requires_python_before_dependencies(
script: PipTestEnvironment,
) -> None:
incompatible_python = "<{0.major}.{0.minor}".format(sys.version_info)
incompatible_python = f"<{sys.version_info.major}.{sys.version_info.minor}"
pkg_dep = create_basic_wheel_for_package(
script,


@ -24,18 +24,11 @@ def _create_find_links(script: PipTestEnvironment) -> _FindLinks:
index_html = script.scratch_path / "index.html"
index_html.write_text(
"""
f"""
<!DOCTYPE html>
<a href="{sdist_url}#sha256={sdist_hash}">{sdist_path.stem}</a>
<a href="{wheel_url}#sha256={wheel_hash}">{wheel_path.stem}</a>
""".format(
sdist_url=sdist_path.as_uri(),
sdist_hash=sdist_hash,
sdist_path=sdist_path,
wheel_url=wheel_path.as_uri(),
wheel_hash=wheel_hash,
wheel_path=wheel_path,
).strip()
<a href="{sdist_path.as_uri()}#sha256={sdist_hash}">{sdist_path.stem}</a>
<a href="{wheel_path.as_uri()}#sha256={wheel_hash}">{wheel_path.stem}</a>
""".strip()
)
return _FindLinks(index_html, sdist_hash, wheel_hash)
@ -99,9 +92,7 @@ def test_new_resolver_hash_intersect_from_constraint(
constraints_txt = script.scratch_path / "constraints.txt"
constraints_txt.write_text(
"base==0.1.0 --hash=sha256:{sdist_hash}".format(
sdist_hash=find_links.sdist_hash,
),
f"base==0.1.0 --hash=sha256:{find_links.sdist_hash}",
)
requirements_txt = script.scratch_path / "requirements.txt"
requirements_txt.write_text(
@ -200,13 +191,10 @@ def test_new_resolver_hash_intersect_empty_from_constraint(
constraints_txt = script.scratch_path / "constraints.txt"
constraints_txt.write_text(
"""
base==0.1.0 --hash=sha256:{sdist_hash}
base==0.1.0 --hash=sha256:{wheel_hash}
""".format(
sdist_hash=find_links.sdist_hash,
wheel_hash=find_links.wheel_hash,
),
f"""
base==0.1.0 --hash=sha256:{find_links.sdist_hash}
base==0.1.0 --hash=sha256:{find_links.wheel_hash}
""",
)
result = script.pip(
@ -240,19 +228,15 @@ def test_new_resolver_hash_requirement_and_url_constraint_can_succeed(
requirements_txt = script.scratch_path / "requirements.txt"
requirements_txt.write_text(
"""
f"""
base==0.1.0 --hash=sha256:{wheel_hash}
""".format(
wheel_hash=wheel_hash,
),
""",
)
constraints_txt = script.scratch_path / "constraints.txt"
constraint_text = "base @ {wheel_url}\n".format(wheel_url=wheel_path.as_uri())
constraint_text = f"base @ {wheel_path.as_uri()}\n"
if constrain_by_hash:
constraint_text += "base==0.1.0 --hash=sha256:{wheel_hash}\n".format(
wheel_hash=wheel_hash,
)
constraint_text += f"base==0.1.0 --hash=sha256:{wheel_hash}\n"
constraints_txt.write_text(constraint_text)
script.pip(
@ -280,19 +264,15 @@ def test_new_resolver_hash_requirement_and_url_constraint_can_fail(
requirements_txt = script.scratch_path / "requirements.txt"
requirements_txt.write_text(
"""
f"""
base==0.1.0 --hash=sha256:{other_hash}
""".format(
other_hash=other_hash,
),
""",
)
constraints_txt = script.scratch_path / "constraints.txt"
constraint_text = "base @ {wheel_url}\n".format(wheel_url=wheel_path.as_uri())
constraint_text = f"base @ {wheel_path.as_uri()}\n"
if constrain_by_hash:
constraint_text += "base==0.1.0 --hash=sha256:{other_hash}\n".format(
other_hash=other_hash,
)
constraint_text += f"base==0.1.0 --hash=sha256:{other_hash}\n"
constraints_txt.write_text(constraint_text)
result = script.pip(
@ -343,17 +323,12 @@ def test_new_resolver_hash_with_extras(script: PipTestEnvironment) -> None:
requirements_txt = script.scratch_path / "requirements.txt"
requirements_txt.write_text(
"""
f"""
child[extra]==0.1.0 --hash=sha256:{child_hash}
parent_with_extra==0.1.0 --hash=sha256:{parent_with_extra_hash}
parent_without_extra==0.1.0 --hash=sha256:{parent_without_extra_hash}
extra==0.1.0 --hash=sha256:{extra_hash}
""".format(
child_hash=child_hash,
parent_with_extra_hash=parent_with_extra_hash,
parent_without_extra_hash=parent_without_extra_hash,
extra_hash=extra_hash,
),
""",
)
script.pip(


@ -58,12 +58,7 @@ def test_new_resolver_target_checks_compatibility_failure(
if platform:
args += ["--platform", platform]
args_tag = "{}{}-{}-{}".format(
implementation,
python_version,
abi,
platform,
)
args_tag = f"{implementation}{python_version}-{abi}-{platform}"
wheel_tag_matches = args_tag == fake_wheel_tag
result = script.pip(*args, expect_error=(not wheel_tag_matches))


@ -159,9 +159,9 @@ def test_conflicting_pep517_backend_requirements(
expect_error=True,
)
msg = (
"Some build dependencies for {url} conflict with the backend "
f"Some build dependencies for {project_dir.as_uri()} conflict with the backend "
"dependencies: simplewheel==1.0 is incompatible with "
"simplewheel==2.0.".format(url=project_dir.as_uri())
"simplewheel==2.0."
)
assert result.returncode != 0 and msg in result.stderr, str(result)
@ -205,8 +205,8 @@ def test_validate_missing_pep517_backend_requirements(
expect_error=True,
)
msg = (
"Some build dependencies for {url} are missing: "
"'simplewheel==1.0', 'test_backend'.".format(url=project_dir.as_uri())
f"Some build dependencies for {project_dir.as_uri()} are missing: "
"'simplewheel==1.0', 'test_backend'."
)
assert result.returncode != 0 and msg in result.stderr, str(result)
@ -231,9 +231,9 @@ def test_validate_conflicting_pep517_backend_requirements(
expect_error=True,
)
msg = (
"Some build dependencies for {url} conflict with the backend "
f"Some build dependencies for {project_dir.as_uri()} conflict with the backend "
"dependencies: simplewheel==2.0 is incompatible with "
"simplewheel==1.0.".format(url=project_dir.as_uri())
"simplewheel==1.0."
)
assert result.returncode != 0 and msg in result.stderr, str(result)


@ -604,9 +604,7 @@ def test_uninstall_without_record_fails(
"simple.dist==0.1'."
)
elif installer:
expected_error_message += " Hint: The package was installed by {}.".format(
installer
)
expected_error_message += f" Hint: The package was installed by {installer}."
assert result2.stderr.rstrip() == expected_error_message
assert_all_changes(result.files_after, result2, ignore_changes)


@ -59,9 +59,7 @@ def test_pip_wheel_success(script: PipTestEnvironment, data: TestData) -> None:
wheel_file_path = script.scratch / wheel_file_name
assert re.search(
r"Created wheel for simple: "
r"filename={filename} size=\d+ sha256=[A-Fa-f0-9]{{64}}".format(
filename=re.escape(wheel_file_name)
),
rf"filename={re.escape(wheel_file_name)} size=\d+ sha256=[A-Fa-f0-9]{{64}}",
result.stdout,
)
assert re.search(r"^\s+Stored in directory: ", result.stdout, re.M)


@ -747,7 +747,7 @@ class PipTestEnvironment(TestFileEnvironment):
for val in json.loads(ret.stdout)
}
expected = {(canonicalize_name(k), v) for k, v in kwargs.items()}
assert expected <= installed, "{!r} not all in {!r}".format(expected, installed)
assert expected <= installed, f"{expected!r} not all in {installed!r}"
def assert_not_installed(self, *args: str) -> None:
ret = self.pip("list", "--format=json")
@ -755,9 +755,7 @@ class PipTestEnvironment(TestFileEnvironment):
# None of the given names should be listed as installed, i.e. their
# intersection should be empty.
expected = {canonicalize_name(k) for k in args}
assert not (expected & installed), "{!r} contained in {!r}".format(
expected, installed
)
assert not (expected & installed), f"{expected!r} contained in {installed!r}"
# FIXME ScriptTest does something similar, but only within a single
@ -1028,7 +1026,7 @@ def _create_test_package_with_srcdir(
pkg_path.joinpath("__init__.py").write_text("")
subdir_path.joinpath("setup.py").write_text(
textwrap.dedent(
"""
f"""
from setuptools import setup, find_packages
setup(
name="{name}",
@ -1036,9 +1034,7 @@ def _create_test_package_with_srcdir(
packages=find_packages(),
package_dir={{"": "src"}},
)
""".format(
name=name
)
"""
)
)
return _vcs_add(dir_path, version_pkg_path, vcs)
@ -1052,7 +1048,7 @@ def _create_test_package(
_create_main_file(version_pkg_path, name=name, output="0.1")
version_pkg_path.joinpath("setup.py").write_text(
textwrap.dedent(
"""
f"""
from setuptools import setup, find_packages
setup(
name="{name}",
@ -1061,9 +1057,7 @@ def _create_test_package(
py_modules=["{name}"],
entry_points=dict(console_scripts=["{name}={name}:main"]),
)
""".format(
name=name
)
"""
)
)
return _vcs_add(dir_path, version_pkg_path, vcs)
@ -1137,7 +1131,7 @@ def urlsafe_b64encode_nopad(data: bytes) -> str:
def create_really_basic_wheel(name: str, version: str) -> bytes:
def digest(contents: bytes) -> str:
return "sha256={}".format(urlsafe_b64encode_nopad(sha256(contents).digest()))
return f"sha256={urlsafe_b64encode_nopad(sha256(contents).digest())}"
def add_file(path: str, text: str) -> None:
contents = text.encode("utf-8")
@ -1153,13 +1147,11 @@ def create_really_basic_wheel(name: str, version: str) -> bytes:
add_file(
f"{dist_info}/METADATA",
dedent(
"""\
f"""\
Metadata-Version: 2.1
Name: {}
Version: {}
""".format(
name, version
)
Name: {name}
Version: {version}
"""
),
)
z.writestr(record_path, "\n".join(",".join(r) for r in records))


@ -56,7 +56,7 @@ def local_checkout(
assert vcs_backend is not None
vcs_backend.obtain(repo_url_path, url=hide_url(remote_repo), verbosity=0)
return "{}+{}".format(vcs_name, Path(repo_url_path).as_uri())
return f"{vcs_name}+{Path(repo_url_path).as_uri()}"
def local_repo(remote_repo: str, temp_path: Path) -> str:


@ -152,7 +152,7 @@ def html5_page(text: str) -> str:
def package_page(spec: Dict[str, str]) -> "WSGIApplication":
def link(name: str, value: str) -> str:
return '<a href="{}">{}</a>'.format(value, name)
return f'<a href="{value}">{name}</a>'
links = "".join(link(*kv) for kv in spec.items())
return text_html_response(html5_page(links))


@ -107,8 +107,8 @@ class TestPipTestEnvironment:
"""
command = (
"import logging; logging.basicConfig(level='INFO'); "
"logging.getLogger().info('sub: {}', 'foo')"
).format(sub_string)
f"logging.getLogger().info('sub: {sub_string}', 'foo')"
)
args = [sys.executable, "-c", command]
script.run(*args, **kwargs)


@ -190,7 +190,7 @@ def urlsafe_b64encode_nopad(data: bytes) -> str:
def digest(contents: bytes) -> str:
return "sha256={}".format(urlsafe_b64encode_nopad(sha256(contents).digest()))
return f"sha256={urlsafe_b64encode_nopad(sha256(contents).digest())}"
def record_file_maker_wrapper(


@ -119,8 +119,8 @@ def test_get_index_content_invalid_content_type_archive(
assert (
"pip._internal.index.collector",
logging.WARNING,
"Skipping page {} because it looks like an archive, and cannot "
"be checked by a HTTP HEAD request.".format(url),
f"Skipping page {url} because it looks like an archive, and cannot "
"be checked by a HTTP HEAD request.",
) in caplog.record_tuples
@ -417,8 +417,8 @@ def _test_parse_links_data_attribute(
html = (
"<!DOCTYPE html>"
'<html><head><meta charset="utf-8"><head>'
"<body>{}</body></html>"
).format(anchor_html)
f"<body>{anchor_html}</body></html>"
)
html_bytes = html.encode("utf-8")
page = IndexContent(
html_bytes,
@ -764,8 +764,8 @@ def test_get_index_content_invalid_scheme(
(
"pip._internal.index.collector",
logging.WARNING,
"Cannot look at {} URL {} because it does not support "
"lookup as web pages.".format(vcs_scheme, url),
f"Cannot look at {vcs_scheme} URL {url} because it does not support "
"lookup as web pages.",
),
]


@ -143,10 +143,7 @@ class TestLink:
def test_is_hash_allowed(
self, hash_name: str, hex_digest: str, expected: bool
) -> None:
url = "https://example.com/wheel.whl#{hash_name}={hex_digest}".format(
hash_name=hash_name,
hex_digest=hex_digest,
)
url = f"https://example.com/wheel.whl#{hash_name}={hex_digest}"
link = Link(url)
hashes_data = {
"sha512": [128 * "a", 128 * "b"],


@ -21,8 +21,8 @@ def test_raise_for_status_raises_exception(status_code: int, error_type: str) ->
with pytest.raises(NetworkConnectionError) as excinfo:
raise_for_status(resp)
assert str(excinfo.value) == (
"{} {}: Network Error for url:"
" http://www.example.com/whatever.tgz".format(status_code, error_type)
f"{status_code} {error_type}: Network Error for url:"
" http://www.example.com/whatever.tgz"
)


@ -235,8 +235,8 @@ class TestRequirementSet:
r"file \(line 1\)\)\n"
r"Can't verify hashes for these file:// requirements because "
r"they point to directories:\n"
r" file://.*{sep}data{sep}packages{sep}FSPkg "
r"\(from -r file \(line 2\)\)".format(sep=sep)
rf" file://.*{sep}data{sep}packages{sep}FSPkg "
r"\(from -r file \(line 2\)\)"
),
):
resolver.resolve(reqset.all_requirements, True)


@ -297,7 +297,7 @@ class TestProcessLine:
def test_yield_line_constraint(self, line_processor: LineProcessor) -> None:
line = "SomeProject"
filename = "filename"
comes_from = "-c {} (line {})".format(filename, 1)
comes_from = f"-c {filename} (line {1})"
req = install_req_from_line(line, comes_from=comes_from, constraint=True)
found_req = line_processor(line, filename, 1, constraint=True)[0]
assert repr(found_req) == repr(req)
@ -326,7 +326,7 @@ class TestProcessLine:
url = "git+https://url#egg=SomeProject"
line = f"-e {url}"
filename = "filename"
comes_from = "-c {} (line {})".format(filename, 1)
comes_from = f"-c {filename} (line {1})"
req = install_req_from_editable(url, comes_from=comes_from, constraint=True)
found_req = line_processor(line, filename, 1, constraint=True)[0]
assert repr(found_req) == repr(req)
@ -873,12 +873,10 @@ class TestParseRequirements:
) -> None:
global_option = "--dry-run"
content = """
content = f"""
--only-binary :all:
INITools==2.0 --global-option="{global_option}"
""".format(
global_option=global_option
)
"""
with requirements_file(content, tmpdir) as reqs_file:
req = next(


@ -261,8 +261,8 @@ class TestCheckDistRequiresPython:
ignore_requires_python=False,
)
assert str(exc.value) == (
"None {} metadata found for distribution: "
"<distribution 'my-project'>".format(metadata_name)
f"None {metadata_name} metadata found for distribution: "
"<distribution 'my-project'>"
)


@ -102,15 +102,13 @@ def test_get_legacy_build_wheel_path__multiple_names(
],
)
def test_get_entrypoints(tmp_path: pathlib.Path, console_scripts: str) -> None:
entry_points_text = """
entry_points_text = f"""
[console_scripts]
{}
{console_scripts}
[section]
common:one = module:func
common:two = module:other_func
""".format(
console_scripts
)
"""
distribution = make_wheel(
"simple",


@ -27,7 +27,7 @@ def is_this_a_good_version_number(string: str) -> Optional[str]:
expected_major = datetime.now().year % 100
if len(release) not in [2, 3]:
return "Not of the form: {0}.N or {0}.N.P".format(expected_major)
return f"Not of the form: {expected_major}.N or {expected_major}.N.P"
return None