mirror of https://github.com/pypa/pip

commit 291aec8b65
Merge branch 'main' into joe/warm_cache_in_threadpool

@@ -91,7 +91,7 @@ jobs:
       - run: git diff --exit-code

   tests-unix:
-    name: tests / ${{ matrix.python }} / ${{ matrix.os }}
+    name: tests / ${{ matrix.python.key || matrix.python }} / ${{ matrix.os }}
     runs-on: ${{ matrix.os }}-latest

     needs: [packaging, determine-changes]

@@ -109,12 +109,14 @@ jobs:
           - "3.9"
           - "3.10"
           - "3.11"
+          - "3.12"

     steps:
       - uses: actions/checkout@v3
       - uses: actions/setup-python@v4
         with:
           python-version: ${{ matrix.python }}
+          allow-prereleases: true

       - name: Install Ubuntu dependencies
         if: matrix.os == 'Ubuntu'

@@ -129,12 +131,12 @@ jobs:
       # Main check
       - name: Run unit tests
         run: >-
-          nox -s test-${{ matrix.python }} --
+          nox -s test-${{ matrix.python.key || matrix.python }} --
          -m unit
          --verbose --numprocesses auto --showlocals
       - name: Run integration tests
         run: >-
-          nox -s test-${{ matrix.python }} --
+          nox -s test-${{ matrix.python.key || matrix.python }} --
          -m integration
          --verbose --numprocesses auto --showlocals
          --durations=5

@@ -17,13 +17,13 @@ repos:
     exclude: .patch

 - repo: https://github.com/psf/black
-  rev: 23.1.0
+  rev: 23.7.0
   hooks:
     - id: black

 - repo: https://github.com/astral-sh/ruff-pre-commit
   # Ruff version.
-  rev: v0.0.270
+  rev: v0.0.287
   hooks:
     - id: ruff

@@ -6,7 +6,7 @@ build:
     python: "3.11"

 sphinx:
-  builder: htmldir
+  builder: dirhtml
   configuration: docs/html/conf.py

 python:

@@ -19,8 +19,6 @@ We release updates regularly, with a new version every 3 months. Find more details
 * `Release notes`_
 * `Release process`_

-In pip 20.3, we've `made a big improvement to the heart of pip`_; `learn more`_. We want your input, so `sign up for our user experience research studies`_ to help us do it right.
-
 **Note**: pip 21.0, in January 2021, removed Python 2 support, per pip's `Python 2 support policy`_. Please migrate to Python 3.

 If you find bugs, need help, or want to talk to the developers, please use our mailing lists or chat rooms:

@@ -49,9 +47,6 @@ rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_.
 .. _Release process: https://pip.pypa.io/en/latest/development/release-process/
 .. _GitHub page: https://github.com/pypa/pip
 .. _Development documentation: https://pip.pypa.io/en/latest/development
-.. _made a big improvement to the heart of pip: https://pyfound.blogspot.com/2020/11/pip-20-3-new-resolver.html
-.. _learn more: https://pip.pypa.io/en/latest/user_guide/#changes-to-the-pip-dependency-resolver-in-20-3-2020
-.. _sign up for our user experience research studies: https://pyfound.blogspot.com/2020/03/new-pip-resolver-to-roll-out-this-year.html
 .. _Python 2 support policy: https://pip.pypa.io/en/latest/development/release-process/#python-2-support
 .. _Issue tracking: https://github.com/pypa/pip/issues
 .. _Discourse channel: https://discuss.python.org/c/packaging

SECURITY.md
@@ -1,3 +1,10 @@
-# Security and Vulnerability Reporting
+# Security Policy

-If you find any security issues, please report to [security@python.org](mailto:security@python.org)
+## Reporting a Vulnerability
+
+Please read the guidelines on reporting security issues [on the
+official website](https://www.python.org/dev/security/) for
+instructions on how to report a security-related problem to
+the Python Security Response Team responsibly.
+
+To reach the response team, email `security at python dot org`.

@@ -103,7 +103,7 @@ $ pip install --upgrade pip
 The current version of pip works on:

 - Windows, Linux and MacOS.
-- CPython 3.7, 3.8, 3.9, 3.10 and latest PyPy3.
+- CPython 3.7, 3.8, 3.9, 3.10, 3.11, 3.12, and latest PyPy3.

 pip is tested to work on the latest patch version of the Python interpreter,
 for each of the minor versions listed above. Previous patch versions are

@@ -56,6 +56,9 @@ package with the following properties:
   URL reference. `false` if the requirement was provided as a name and version
   specifier.

+- `is_yanked`: `true` if the requirement was yanked from the index, but was still
+  selected by pip to conform to [PEP 592](https://peps.python.org/pep-0592/#installers).
+
 - `download_info`: Information about the artifact (to be) downloaded for installation,
   using the [direct URL data
   structure](https://packaging.python.org/en/latest/specifications/direct-url-data-structure/).

@@ -106,6 +109,7 @@ will produce an output similar to this (metadata abridged for brevity):
         }
       },
       "is_direct": false,
+      "is_yanked": false,
       "requested": true,
       "metadata": {
         "name": "pydantic",

@@ -133,6 +137,7 @@ will produce an output similar to this (metadata abridged for brevity):
         }
       },
       "is_direct": true,
+      "is_yanked": false,
       "requested": true,
       "metadata": {
         "name": "packaging",

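As a quick illustration of consuming the new field, here is a minimal sketch. It assumes a report written with `pip install --report report.json` (the file name is arbitrary) and flags requirements installed from yanked releases:

import json

# Load a report produced with `pip install --report report.json`.
with open("report.json", encoding="utf-8") as f:
    report = json.load(f)

for item in report["install"]:
    if item.get("is_yanked", False):
        name = item["metadata"]["name"]
        print(f"{name} was installed from a yanked release (PEP 592)")
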
@@ -68,7 +68,7 @@ man pages][netrc-docs].
 pip supports loading credentials stored in your keyring using the
 {pypi}`keyring` library, which can be enabled py passing `--keyring-provider`
 with a value of `auto`, `disabled`, `import`, or `subprocess`. The default
-value `auto` respects `--no-input` and not query keyring at all if the option
+value `auto` respects `--no-input` and does not query keyring at all if the option
 is used; otherwise it tries the `import`, `subprocess`, and `disabled`
 providers (in this order) and uses the first one that works.

@@ -8,7 +8,7 @@ and this article is intended to help readers understand what is happening
 ```{note}
 This document is a work in progress. The details included are accurate (at the
 time of writing), but there is additional information, in particular around
-pip's interface with resolvelib, which have not yet been included.
+pip's interface with resolvelib, which has not yet been included.

 Contributions to improve this document are welcome.
 ```

@@ -26,7 +26,7 @@ The practical implication of that is that there will always be some situations
 where pip cannot determine what to install in a reasonable length of time. We
 make every effort to ensure that such situations happen rarely, but eliminating
 them altogether isn't even theoretically possible. We'll discuss what options
-yopu have if you hit a problem situation like this a little later.
+you have if you hit a problem situation like this a little later.

 ## Python specific issues

@@ -136,7 +136,7 @@ operations:
   that satisfy them. This is essentially where the finder interacts with the
   resolver.
 * `is_satisfied_by` - checks if a candidate satisfies a requirement. This is
-  basically the implementation of what a requirement meams.
+  basically the implementation of what a requirement means.
 * `get_dependencies` - get the dependency metadata for a candidate. This is
   the implementation of the process of getting and reading package metadata.

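For readers who want to see where these operations plug in, below is a minimal sketch of a resolvelib provider. The method names follow resolvelib's `AbstractProvider` interface; the bodies here are placeholders (`requirement.specifier`, `candidate.dependencies` are stand-ins), not pip's actual implementation, which lives in `pip/_internal/resolution/resolvelib/provider.py`.

from pip._vendor.resolvelib.providers import AbstractProvider


class SketchProvider(AbstractProvider):
    """Placeholder provider showing where each operation lives."""

    def identify(self, requirement_or_candidate):
        return requirement_or_candidate.name

    def get_preference(
        self, identifier, resolutions, candidates, information, backtrack_causes
    ):
        return 0  # a real provider ranks identifiers to try "easy" ones first

    def find_matches(self, identifier, requirements, incompatibilities):
        return []  # where the finder hands candidate versions to the resolver

    def is_satisfied_by(self, requirement, candidate):
        # the implementation of what a requirement means
        return candidate.version in requirement.specifier

    def get_dependencies(self, candidate):
        # getting and reading package metadata
        return candidate.dependencies
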
@@ -1,4 +1,4 @@
-sphinx ~= 6.0
+sphinx ~= 7.0
 towncrier
 furo
 myst_parser

@@ -0,0 +1 @@
+Ignore errors in temporary directory cleanup (show a warning instead).

@@ -0,0 +1 @@
+Removed uses of ``datetime.datetime.utcnow`` from non-vendored code.

@@ -0,0 +1,6 @@
+The metadata-fetching log message is moved to the VERBOSE level and now hidden
+by default. The more significant information in this message for most users is
+already available in surrounding logs (the package name and version of the
+metadata being fetched), while the URL to the exact metadata file is generally
+too long and clutters the output. The message can be brought back with
+``--verbose``.

@@ -0,0 +1 @@
+Add test cases for some behaviors of ``install --dry-run`` and ``--use-feature=fast-deps``.

@@ -0,0 +1 @@
+Fix improper handling of the new onexc argument of ``shutil.rmtree()`` in Python 3.12.

@@ -0,0 +1 @@
+Prevent downloading sdists twice when PEP 658 metadata is present.

@@ -0,0 +1 @@
+Add lots of comments to the ``BuildTracker``.

@@ -0,0 +1 @@
+Improve use of data structures to make candidate selection 1.6x faster.

@@ -0,0 +1 @@
+Allow ``pip install --dry-run`` to use platform and ABI overriding options similar to ``--target``.

@@ -0,0 +1 @@
+Add ``is_yanked`` boolean entry to the installation report (``--report``) to indicate whether the requirement was yanked from the index, but was still selected by pip to conform to PEP 592.

@@ -0,0 +1 @@
+Filter out yanked links from the available versions error message: "(from versions: 1.0, 2.0, 3.0)" will not contain yanked versions, conforming to PEP 592. The yanked versions (if any) will be mentioned in a separate error message.

@@ -0,0 +1 @@
+Added reference to `vulnerability reporting guidelines <https://www.python.org/dev/security/>`_ to pip's security policy.

@@ -0,0 +1 @@
+Add ruff rules ASYNC,C4,C90,PERF,PLE,PLR for minor optimizations and to set upper limits on code complexity.

@@ -0,0 +1 @@
+Upgrade certifi to 2023.7.22

@@ -67,7 +67,7 @@ def should_update_common_wheels() -> bool:
 # -----------------------------------------------------------------------------
 # Development Commands
 # -----------------------------------------------------------------------------
-@nox.session(python=["3.7", "3.8", "3.9", "3.10", "3.11", "pypy3"])
+@nox.session(python=["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "pypy3"])
 def test(session: nox.Session) -> None:
     # Get the common wheels.
     if should_update_common_wheels():

@@ -89,6 +89,7 @@ def test(session: nox.Session) -> None:
         shutil.rmtree(sdist_dir, ignore_errors=True)

     # fmt: off
+    session.install("setuptools")
     session.run(
         "python", "setup.py", "sdist", "--formats=zip", "--dist-dir", sdist_dir,
         silent=True,

@@ -351,6 +352,7 @@ def build_dists(session: nox.Session) -> List[str]:
     )

     session.log("# Build distributions")
+    session.install("setuptools", "wheel")
     session.run("python", "setup.py", "sdist", "bdist_wheel", silent=True)
     produced_dists = glob.glob("dist/*")

@@ -74,9 +74,9 @@ webencodings = "https://github.com/SimonSapin/python-webencodings/raw/master/LIC

 [tool.ruff]
 extend-exclude = [
-    "_vendor",
     "./build",
     ".scratch",
+    "_vendor",
     "data",
 ]
 ignore = [

@@ -88,21 +88,38 @@ ignore = [
 ]
 line-length = 88
 select = [
+    "ASYNC",
     "B",
+    "C4",
+    "C90",
     "E",
     "F",
-    "W",
     "G",
-    "ISC",
     "I",
+    "ISC",
+    "PERF",
+    "PLE",
+    "PLR0",
+    "W",
 ]

-[tool.ruff.per-file-ignores]
-"noxfile.py" = ["G"]
-"tests/*" = ["B011"]
-
 [tool.ruff.isort]
 # We need to explicitly make pip "first party" as it's imported by code in
 # the docs and tests directories.
 known-first-party = ["pip"]
 known-third-party = ["pip._vendor"]
+
+[tool.ruff.mccabe]
+max-complexity = 33  # default is 10
+
+[tool.ruff.per-file-ignores]
+"noxfile.py" = ["G"]
+"src/pip/_internal/*" = ["PERF203"]
+"tests/*" = ["B011"]
+"tests/unit/test_finder.py" = ["C414"]
+
+[tool.ruff.pylint]
+max-args = 15  # default is 5
+max-branches = 28  # default is 12
+max-returns = 13  # default is 6
+max-statements = 134  # default is 50

@@ -78,12 +78,10 @@ class Cache:
         if can_not_cache:
             return []

-        candidates = []
         path = self.get_path_for_link(link)
         if os.path.isdir(path):
-            for candidate in os.listdir(path):
-                candidates.append((candidate, path))
-        return candidates
+            return [(candidate, path) for candidate in os.listdir(path)]
+        return []

     def get_path_for_link(self, link: Link) -> str:
         """Return a directory to store cached items in for link."""

@@ -71,8 +71,9 @@ def autocomplete() -> None:

         for opt in subcommand.parser.option_list_all:
             if opt.help != optparse.SUPPRESS_HELP:
-                for opt_str in opt._long_opts + opt._short_opts:
-                    options.append((opt_str, opt.nargs))
+                options += [
+                    (opt_str, opt.nargs) for opt_str in opt._long_opts + opt._short_opts
+                ]

         # filter out previously specified options from available options
         prev_opts = [x.split("=")[0] for x in cwords[1 : cword - 1]]

@@ -92,10 +92,10 @@ def check_dist_restriction(options: Values, check_target: bool = False) -> None:
         )

     if check_target:
-        if dist_restriction_set and not options.target_dir:
+        if not options.dry_run and dist_restriction_set and not options.target_dir:
             raise CommandError(
                 "Can not use any platform or abi specific options unless "
-                "installing via '--target'"
+                "installing via '--target' or using '--dry-run'"
             )

@@ -3,10 +3,10 @@ import textwrap
 from optparse import Values
 from typing import Any, List

-import pip._internal.utils.filesystem as filesystem
 from pip._internal.cli.base_command import Command
 from pip._internal.cli.status_codes import ERROR, SUCCESS
 from pip._internal.exceptions import CommandError, PipError
+from pip._internal.utils import filesystem
 from pip._internal.utils.logging import getLogger

 logger = getLogger(__name__)

@@ -151,14 +151,8 @@ class CacheCommand(Command):
         logger.info("\n".join(sorted(results)))

     def format_for_abspath(self, files: List[str]) -> None:
-        if not files:
-            return
-
-        results = []
-        for filename in files:
-            results.append(filename)
-
-        logger.info("\n".join(sorted(results)))
+        if files:
+            logger.info("\n".join(sorted(files)))

     def remove_cache_items(self, options: Values, args: List[Any]) -> None:
         if len(args) > 1:

@@ -105,7 +105,7 @@ def show_tags(options: Values) -> None:
     tag_limit = 10

     target_python = make_target_python(options)
-    tags = target_python.get_tags()
+    tags = target_python.get_sorted_tags()

     # Display the target options that were explicitly provided.
     formatted_target = target_python.format_given()

@@ -134,10 +134,9 @@ def show_tags(options: Values) -> None:


 def ca_bundle_info(config: Configuration) -> str:
-    levels = set()
-    for key, _ in config.items():
-        levels.add(key.split(".")[0])
-
+    # Ruff misidentifies config as a dict.
+    # Configuration does not support the mapping interface.
+    levels = {key.split(".", 1)[0] for key, _ in config.items()}  # noqa: PERF102
     if not levels:
         return "Not specified"

@@ -297,7 +297,7 @@ class ListCommand(IndexGroupCommand):

         # Create and add a separator.
         if len(data) > 0:
-            pkg_strings.insert(1, " ".join(map(lambda x: "-" * x, sizes)))
+            pkg_strings.insert(1, " ".join("-" * x for x in sizes))

         for val in pkg_strings:
             write_output(val)

@@ -1,4 +1,5 @@
 import abc
+from typing import Optional

 from pip._internal.index.package_finder import PackageFinder
 from pip._internal.metadata.base import BaseDistribution

@@ -19,12 +20,23 @@ class AbstractDistribution(metaclass=abc.ABCMeta):

     - we must be able to create a Distribution object exposing the
       above metadata.
+
+    - if we need to do work in the build tracker, we must be able to generate a unique
+      string to identify the requirement in the build tracker.
     """

     def __init__(self, req: InstallRequirement) -> None:
         super().__init__()
         self.req = req

+    @abc.abstractproperty
+    def build_tracker_id(self) -> Optional[str]:
+        """A string that uniquely identifies this requirement to the build tracker.
+
+        If None, then this dist has no work to do in the build tracker, and
+        ``.prepare_distribution_metadata()`` will not be called."""
+        raise NotImplementedError()
+
     @abc.abstractmethod
     def get_metadata_distribution(self) -> BaseDistribution:
         raise NotImplementedError()

@@ -1,3 +1,5 @@
+from typing import Optional
+
 from pip._internal.distributions.base import AbstractDistribution
 from pip._internal.index.package_finder import PackageFinder
 from pip._internal.metadata import BaseDistribution

@@ -10,6 +12,10 @@ class InstalledDistribution(AbstractDistribution):
     been computed.
     """

+    @property
+    def build_tracker_id(self) -> Optional[str]:
+        return None
+
     def get_metadata_distribution(self) -> BaseDistribution:
         assert self.req.satisfied_by is not None, "not actually installed"
         return self.req.satisfied_by

@@ -1,5 +1,5 @@
 import logging
-from typing import Iterable, Set, Tuple
+from typing import Iterable, Optional, Set, Tuple

 from pip._internal.build_env import BuildEnvironment
 from pip._internal.distributions.base import AbstractDistribution

@@ -18,6 +18,12 @@ class SourceDistribution(AbstractDistribution):
     generated, either using PEP 517 or using the legacy `setup.py egg_info`.
     """

+    @property
+    def build_tracker_id(self) -> Optional[str]:
+        """Identify this requirement uniquely by its link."""
+        assert self.req.link
+        return self.req.link.url_without_fragment
+
     def get_metadata_distribution(self) -> BaseDistribution:
         return self.req.get_dist()

@@ -1,3 +1,5 @@
+from typing import Optional
+
 from pip._vendor.packaging.utils import canonicalize_name

 from pip._internal.distributions.base import AbstractDistribution

@@ -15,6 +17,10 @@ class WheelDistribution(AbstractDistribution):
     This does not need any preparation as wheels can be directly unpacked.
     """

+    @property
+    def build_tracker_id(self) -> Optional[str]:
+        return None
+
     def get_metadata_distribution(self) -> BaseDistribution:
         """Loads the metadata from the wheel file into memory and returns a
         Distribution that uses it, not relying on the wheel file or

@@ -198,7 +198,7 @@ class LinkEvaluator:
             reason = f"wrong project name (not {self.project_name})"
             return (LinkType.different_project, reason)

-        supported_tags = self._target_python.get_tags()
+        supported_tags = self._target_python.get_unsorted_tags()
         if not wheel.supported(supported_tags):
             # Include the wheel's tags in the reason string to
             # simplify troubleshooting compatibility issues.

@@ -414,7 +414,7 @@ class CandidateEvaluator:
         if specifier is None:
             specifier = specifiers.SpecifierSet()

-        supported_tags = target_python.get_tags()
+        supported_tags = target_python.get_sorted_tags()

         return cls(
             project_name=project_name,

@@ -89,7 +89,7 @@ def distutils_scheme(
     # finalize_options(); we only want to override here if the user
     # has explicitly requested it hence going back to the config
     if "install_lib" in d.get_option_dict("install"):
-        scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))
+        scheme.update({"purelib": i.install_lib, "platlib": i.install_lib})

     if running_under_virtualenv():
         if home:

@@ -23,6 +23,9 @@ class InstallationReport:
             # includes editable requirements), and false if the requirement was
             # downloaded from a PEP 503 index or --find-links.
             "is_direct": ireq.is_direct,
+            # is_yanked is true if the requirement was yanked from the index, but
+            # was still selected by pip to conform to PEP 592.
+            "is_yanked": ireq.link.is_yanked if ireq.link else False,
             # requested is true if the requirement was specified by the user (aka
             # top level requirement), and false if it was installed as a dependency of a
             # requirement. https://peps.python.org/pep-0376/#requested

@@ -33,7 +36,7 @@ class InstallationReport:
         }
         if ireq.user_supplied and ireq.extras:
             # For top level requirements, the list of requested extras, if any.
-            res["requested_extras"] = list(sorted(ireq.extras))
+            res["requested_extras"] = sorted(ireq.extras)
         return res

     def to_dict(self) -> Dict[str, Any]:

@@ -1,5 +1,5 @@
 import sys
-from typing import List, Optional, Tuple
+from typing import List, Optional, Set, Tuple

 from pip._vendor.packaging.tags import Tag

@@ -22,6 +22,7 @@ class TargetPython:
         "py_version",
         "py_version_info",
         "_valid_tags",
+        "_valid_tags_set",
     ]

     def __init__(

@@ -61,8 +62,9 @@ class TargetPython:
         self.py_version = py_version
         self.py_version_info = py_version_info

-        # This is used to cache the return value of get_tags().
+        # This is used to cache the return value of get_(un)sorted_tags.
         self._valid_tags: Optional[List[Tag]] = None
+        self._valid_tags_set: Optional[Set[Tag]] = None

     def format_given(self) -> str:
         """

@@ -84,7 +86,7 @@ class TargetPython:
             f"{key}={value!r}" for key, value in key_values if value is not None
         )

-    def get_tags(self) -> List[Tag]:
+    def get_sorted_tags(self) -> List[Tag]:
         """
         Return the supported PEP 425 tags to check wheel candidates against.

@@ -108,3 +110,13 @@ class TargetPython:
             self._valid_tags = tags

         return self._valid_tags
+
+    def get_unsorted_tags(self) -> Set[Tag]:
+        """Exactly the same as get_sorted_tags, but returns a set.
+
+        This is important for performance.
+        """
+        if self._valid_tags_set is None:
+            self._valid_tags_set = set(self.get_sorted_tags())
+
+        return self._valid_tags_set

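The reason for the two variants: candidate ranking needs the priority-ordered list, while link evaluation only asks "is this tag supported?", which is O(1) against a set but O(n) against the list. A rough standalone sketch of the difference (the tag strings here are made up; the real code uses `packaging.tags.Tag` objects):

import timeit

supported_list = [f"cp3{i}-abi3-manylinux_x86_64" for i in range(600)]
supported_set = set(supported_list)
probe = "cp9999-none-any"  # an unsupported tag, the common case

list_t = timeit.timeit(lambda: probe in supported_list, number=10_000)
set_t = timeit.timeit(lambda: probe in supported_set, number=10_000)
print(f"list: {list_t:.3f}s  set: {set_t:.3f}s")  # the set wins by orders of magnitude
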
@@ -51,10 +51,22 @@ def get_build_tracker() -> Generator["BuildTracker", None, None]:
             yield tracker


+class TrackerId(str):
+    """Uniquely identifying string provided to the build tracker."""
+
+
 class BuildTracker:
+    """Ensure that an sdist cannot request itself as a setup requirement.
+
+    When an sdist is prepared, it identifies its setup requirements in the
+    context of ``BuildTracker.track()``. If a requirement shows up recursively, this
+    raises an exception.
+
+    This stops fork bombs embedded in malicious packages."""
+
     def __init__(self, root: str) -> None:
         self._root = root
-        self._entries: Set[InstallRequirement] = set()
+        self._entries: Dict[TrackerId, InstallRequirement] = {}
         logger.debug("Created build tracker: %s", self._root)

     def __enter__(self) -> "BuildTracker":

@@ -69,16 +81,15 @@ class BuildTracker:
     ) -> None:
         self.cleanup()

-    def _entry_path(self, link: Link) -> str:
-        hashed = hashlib.sha224(link.url_without_fragment.encode()).hexdigest()
+    def _entry_path(self, key: TrackerId) -> str:
+        hashed = hashlib.sha224(key.encode()).hexdigest()
         return os.path.join(self._root, hashed)

-    def add(self, req: InstallRequirement) -> None:
+    def add(self, req: InstallRequirement, key: TrackerId) -> None:
         """Add an InstallRequirement to build tracking."""

-        assert req.link
         # Get the file to write information about this requirement.
-        entry_path = self._entry_path(req.link)
+        entry_path = self._entry_path(key)

         # Try reading from the file. If it exists and can be read from, a build
         # is already in progress, so a LookupError is raised.

@@ -92,33 +103,37 @@ class BuildTracker:
             raise LookupError(message)

         # If we're here, req should really not be building already.
-        assert req not in self._entries
+        assert key not in self._entries

         # Start tracking this requirement.
         with open(entry_path, "w", encoding="utf-8") as fp:
             fp.write(str(req))
-        self._entries.add(req)
+        self._entries[key] = req

         logger.debug("Added %s to build tracker %r", req, self._root)

-    def remove(self, req: InstallRequirement) -> None:
+    def remove(self, req: InstallRequirement, key: TrackerId) -> None:
         """Remove an InstallRequirement from build tracking."""

-        assert req.link
-        # Delete the created file and the corresponding entries.
-        os.unlink(self._entry_path(req.link))
-        self._entries.remove(req)
+        # Delete the created file and the corresponding entry.
+        os.unlink(self._entry_path(key))
+        del self._entries[key]

         logger.debug("Removed %s from build tracker %r", req, self._root)

     def cleanup(self) -> None:
-        for req in set(self._entries):
-            self.remove(req)
+        for key, req in list(self._entries.items()):
+            self.remove(req, key)

         logger.debug("Removed build tracker: %r", self._root)

     @contextlib.contextmanager
-    def track(self, req: InstallRequirement) -> Generator[None, None, None]:
-        self.add(req)
+    def track(self, req: InstallRequirement, key: str) -> Generator[None, None, None]:
+        """Ensure that `key` cannot install itself as a setup requirement.
+
+        :raises LookupError: If `key` was already provided in a parent invocation of
+            the context introduced by this method."""
+        tracker_id = TrackerId(key)
+        self.add(req, tracker_id)
         yield
-        self.remove(req, tracker_id)
+        self.remove(req, tracker_id)

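A rough usage sketch of the reworked API (here `req` stands in for a real `InstallRequirement`, and the key is whatever `build_tracker_id` returned, e.g. a link URL without its fragment); re-entering `track()` with the same key is what raises the `LookupError`:

with get_build_tracker() as tracker:
    key = "https://example.com/pkg-1.0.tar.gz"  # hypothetical build_tracker_id
    with tracker.track(req, key):
        # prepare_distribution_metadata() runs here; if the build recursively
        # asks to track the same key, add() raises LookupError, stopping a
        # self-referential (fork-bomb) build.
        with tracker.track(req, key):  # raises LookupError
            ...
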
@@ -267,9 +267,9 @@ def get_csv_rows_for_installed(
         path = _fs_to_record_path(f, lib_dir)
         digest, length = rehash(f)
         installed_rows.append((path, digest, length))
-    for installed_record_path in installed.values():
-        installed_rows.append((installed_record_path, "", ""))
-    return installed_rows
+    return installed_rows + [
+        (installed_record_path, "", "") for installed_record_path in installed.values()
+    ]


 def get_console_script_specs(console: Dict[str, str]) -> List[str]:

@@ -4,10 +4,10 @@
 # The following comment should be removed at some point in the future.
 # mypy: strict-optional=False

-import logging
 import mimetypes
 import os
 import shutil
+from pathlib import Path
 from typing import Dict, Iterable, List, Optional

 from pip._vendor.packaging.utils import canonicalize_name

@@ -21,7 +21,6 @@ from pip._internal.exceptions import (
     InstallationError,
     MetadataInconsistent,
     NetworkConnectionError,
-    PreviousBuildDirError,
     VcsHashUnsupported,
 )
 from pip._internal.index.package_finder import PackageFinder

@@ -37,6 +36,7 @@ from pip._internal.network.lazy_wheel import (
 from pip._internal.network.session import PipSession
 from pip._internal.operations.build.build_tracker import BuildTracker
 from pip._internal.req.req_install import InstallRequirement
+from pip._internal.utils._log import getLogger
 from pip._internal.utils.direct_url_helpers import (
     direct_url_for_editable,
     direct_url_from_link,

@@ -47,13 +47,12 @@ from pip._internal.utils.misc import (
     display_path,
     hash_file,
     hide_url,
-    is_installable_dir,
 )
 from pip._internal.utils.temp_dir import TempDirectory
 from pip._internal.utils.unpacking import unpack_file
 from pip._internal.vcs import vcs

-logger = logging.getLogger(__name__)
+logger = getLogger(__name__)


 def _get_prepared_distribution(

@@ -65,10 +64,12 @@ def _get_prepared_distribution(
 ) -> BaseDistribution:
     """Prepare a distribution for installation."""
     abstract_dist = make_distribution_for_install_requirement(req)
-    with build_tracker.track(req):
-        abstract_dist.prepare_distribution_metadata(
-            finder, build_isolation, check_build_deps
-        )
+    tracker_id = abstract_dist.build_tracker_id
+    if tracker_id is not None:
+        with build_tracker.track(req, tracker_id):
+            abstract_dist.prepare_distribution_metadata(
+                finder, build_isolation, check_build_deps
+            )
     return abstract_dist.get_metadata_distribution()

@@ -317,21 +318,7 @@ class RequirementPreparer:
             autodelete=True,
             parallel_builds=parallel_builds,
         )
-
-        # If a checkout exists, it's unwise to keep going. version
-        # inconsistencies are logged later, but do not fail the
-        # installation.
-        # FIXME: this won't upgrade when there's an existing
-        # package unpacked in `req.source_dir`
-        # TODO: this check is now probably dead code
-        if is_installable_dir(req.source_dir):
-            raise PreviousBuildDirError(
-                "pip can't proceed with requirements '{}' due to a"
-                "pre-existing build directory ({}). This is likely "
-                "due to a previous installation that failed . pip is "
-                "being responsible and not assuming it can delete this. "
-                "Please delete it and try again.".format(req, req.source_dir)
-            )
+        req.ensure_pristine_source_checkout()

     def _get_linked_req_hashes(self, req: InstallRequirement) -> Hashes:
         # By the time this is called, the requirement's link should have

@@ -394,7 +381,7 @@ class RequirementPreparer:
         if metadata_link is None:
             return None
         assert req.req is not None
-        logger.info(
+        logger.verbose(
             "Obtaining dependency information for %s from %s",
             req.req,
             metadata_link,

@@ -479,20 +466,19 @@ class RequirementPreparer:
         for link, (filepath, _) in batch_download:
             logger.debug("Downloading link %s to %s", link, filepath)
             req = links_to_fully_download[link]
+            # Record the downloaded file path so wheel reqs can extract a Distribution
+            # in .get_dist().
             req.local_file_path = filepath
-            # TODO: This needs fixing for sdists
-            # This is an emergency fix for #11847, which reports that
-            # distributions get downloaded twice when metadata is loaded
-            # from a PEP 658 standalone metadata file. Setting _downloaded
-            # fixes this for wheels, but breaks the sdist case (tests
-            # test_download_metadata). As PyPI is currently only serving
-            # metadata for wheels, this is not an immediate issue.
-            # Fixing the problem properly looks like it will require a
-            # complete refactoring of the `prepare_linked_requirements_more`
-            # logic, and I haven't a clue where to start on that, so for now
-            # I have fixed the issue *just* for wheels.
-            if req.is_wheel:
-                self._downloaded[req.link.url] = filepath
+            # Record that the file is downloaded so we don't do it again in
+            # _prepare_linked_requirement().
+            self._downloaded[req.link.url] = filepath
+
+            # If this is an sdist, we need to unpack it after downloading, but the
+            # .source_dir won't be set up until we are in _prepare_linked_requirement().
+            # Add the downloaded archive to the install requirement to unpack after
+            # preparing the source dir.
+            if not req.is_wheel:
+                req.needs_unpacked_archive(Path(filepath))

         # This step is necessary to ensure all lazy wheels are processed
         # successfully by the 'download', 'wheel', and 'install' commands.

@@ -1,6 +1,3 @@
-# The following comment should be removed at some point in the future.
-# mypy: strict-optional=False
-
 import functools
 import logging
 import os

@@ -9,6 +6,7 @@ import sys
 import uuid
 import zipfile
 from optparse import Values
+from pathlib import Path
 from typing import Any, Collection, Dict, Iterable, List, Optional, Sequence, Union

 from pip._vendor.packaging.markers import Marker

@@ -20,7 +18,7 @@ from pip._vendor.packaging.version import parse as parse_version
 from pip._vendor.pyproject_hooks import BuildBackendHookCaller

 from pip._internal.build_env import BuildEnvironment, NoOpBuildEnvironment
-from pip._internal.exceptions import InstallationError
+from pip._internal.exceptions import InstallationError, PreviousBuildDirError
 from pip._internal.locations import get_scheme
 from pip._internal.metadata import (
     BaseDistribution,

@@ -50,11 +48,13 @@ from pip._internal.utils.misc import (
     backup_dir,
     display_path,
     hide_url,
+    is_installable_dir,
     redact_auth_from_url,
 )
 from pip._internal.utils.packaging import safe_extra
 from pip._internal.utils.subprocess import runner_with_spinner_message
 from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
+from pip._internal.utils.unpacking import unpack_file
 from pip._internal.utils.virtualenv import running_under_virtualenv
 from pip._internal.vcs import vcs

@@ -183,6 +183,9 @@ class InstallRequirement:
         # This requirement needs more preparation before it can be built
         self.needs_more_preparation = False

+        # This requirement needs to be unpacked before it can be installed.
+        self._archive_source: Optional[Path] = None
+
     def __str__(self) -> str:
         if self.req:
             s = str(self.req)

@@ -244,6 +247,7 @@ class InstallRequirement:

     @property
     def specifier(self) -> SpecifierSet:
+        assert self.req is not None
         return self.req.specifier

     @property

@@ -257,7 +261,8 @@ class InstallRequirement:

         For example, some-package==1.2 is pinned; some-package>1.2 is not.
         """
-        specifiers = self.specifier
+        assert self.req is not None
+        specifiers = self.req.specifier
         return len(specifiers) == 1 and next(iter(specifiers)).operator in {"==", "==="}

     def match_markers(self, extras_requested: Optional[Iterable[str]] = None) -> bool:

@@ -305,6 +310,7 @@ class InstallRequirement:
             else:
                 link = None
             if link and link.hash:
+                assert link.hash_name is not None
                 good_hashes.setdefault(link.hash_name, []).append(link.hash)
         return Hashes(good_hashes)

@@ -314,6 +320,7 @@ class InstallRequirement:
             return None
         s = str(self.req)
         if self.comes_from:
+            comes_from: Optional[str]
             if isinstance(self.comes_from, str):
                 comes_from = self.comes_from
             else:

@@ -345,7 +352,7 @@ class InstallRequirement:

         # When parallel builds are enabled, add a UUID to the build directory
         # name so multiple builds do not interfere with each other.
-        dir_name: str = canonicalize_name(self.name)
+        dir_name: str = canonicalize_name(self.req.name)
         if parallel_builds:
             dir_name = f"{dir_name}_{uuid.uuid4().hex}"

@@ -388,6 +395,7 @@ class InstallRequirement:
         )

     def warn_on_mismatching_name(self) -> None:
+        assert self.req is not None
         metadata_name = canonicalize_name(self.metadata["Name"])
         if canonicalize_name(self.req.name) == metadata_name:
             # Everything is fine.

@@ -457,6 +465,7 @@ class InstallRequirement:
     # Things valid for sdists
     @property
     def unpacked_source_directory(self) -> str:
+        assert self.source_dir, f"No source dir for {self}"
         return os.path.join(
             self.source_dir, self.link and self.link.subdirectory_fragment or ""
         )

@@ -543,7 +552,7 @@ class InstallRequirement:
         Under PEP 517 and PEP 660, call the backend hook to prepare the metadata.
         Under legacy processing, call setup.py egg-info.
         """
-        assert self.source_dir
+        assert self.source_dir, f"No source dir for {self}"
         details = self.name or f"from {self.link}"

         if self.use_pep517:

@@ -592,8 +601,10 @@ class InstallRequirement:
         if self.metadata_directory:
             return get_directory_distribution(self.metadata_directory)
         elif self.local_file_path and self.is_wheel:
+            assert self.req is not None
             return get_wheel_distribution(
-                FilesystemWheel(self.local_file_path), canonicalize_name(self.name)
+                FilesystemWheel(self.local_file_path),
+                canonicalize_name(self.req.name),
             )
         raise AssertionError(
             f"InstallRequirement {self} has no metadata directory and no wheel: "

@@ -601,9 +612,9 @@ class InstallRequirement:
         )

     def assert_source_matches_version(self) -> None:
-        assert self.source_dir
+        assert self.source_dir, f"No source dir for {self}"
         version = self.metadata["version"]
-        if self.req.specifier and version not in self.req.specifier:
+        if self.req and self.req.specifier and version not in self.req.specifier:
             logger.warning(
                 "Requested %s, but installing version %s",
                 self,

@@ -640,6 +651,27 @@ class InstallRequirement:
             parallel_builds=parallel_builds,
         )

+    def needs_unpacked_archive(self, archive_source: Path) -> None:
+        assert self._archive_source is None
+        self._archive_source = archive_source
+
+    def ensure_pristine_source_checkout(self) -> None:
+        """Ensure the source directory has not yet been built in."""
+        assert self.source_dir is not None
+        if self._archive_source is not None:
+            unpack_file(str(self._archive_source), self.source_dir)
+        elif is_installable_dir(self.source_dir):
+            # If a checkout exists, it's unwise to keep going.
+            # version inconsistencies are logged later, but do not fail
+            # the installation.
+            raise PreviousBuildDirError(
+                f"pip can't proceed with requirements '{self}' due to a "
+                f"pre-existing build directory ({self.source_dir}). This is likely "
+                "due to a previous installation that failed. pip is "
+                "being responsible and not assuming it can delete this. "
+                "Please delete it and try again."
+            )
+
     # For editable installations
     def update_editable(self) -> None:
         if not self.link:

@@ -696,9 +728,10 @@ class InstallRequirement:
             name = name.replace(os.path.sep, "/")
             return name

+        assert self.req is not None
         path = os.path.join(parentdir, path)
         name = _clean_zip_name(path, rootdir)
-        return self.name + "/" + name
+        return self.req.name + "/" + name

     def archive(self, build_dir: Optional[str]) -> None:
         """Saves archive to provided build_dir.

@@ -777,8 +810,9 @@ class InstallRequirement:
         use_user_site: bool = False,
         pycompile: bool = True,
     ) -> None:
+        assert self.req is not None
         scheme = get_scheme(
-            self.name,
+            self.req.name,
             user=use_user_site,
             home=home,
             root=root,

@@ -792,7 +826,7 @@ class InstallRequirement:
             prefix=prefix,
             home=home,
             use_user_site=use_user_site,
-            name=self.name,
+            name=self.req.name,
             setup_py_path=self.setup_py_path,
             isolated=self.isolated,
             build_env=self.build_env,

@@ -805,7 +839,7 @@ class InstallRequirement:
         assert self.local_file_path

         install_wheel(
-            self.name,
+            self.req.name,
             self.local_file_path,
             scheme=scheme,
             req_description=str(self.req),

@@ -274,7 +274,7 @@ class StashedUninstallPathSet:

     def commit(self) -> None:
         """Commits the uninstall by removing stashed files."""
-        for _, save_dir in self._save_dirs.items():
+        for save_dir in self._save_dirs.values():
             save_dir.cleanup()
         self._moves = []
         self._save_dirs = {}

@@ -132,7 +132,7 @@ class Factory:
         if not link.is_wheel:
             return
         wheel = Wheel(link.filename)
-        if wheel.supported(self._finder.target_python.get_tags()):
+        if wheel.supported(self._finder.target_python.get_unsorted_tags()):
             return
         msg = f"{link.filename} is not a supported wheel on this platform."
         raise UnsupportedWheel(msg)

@@ -603,8 +603,26 @@ class Factory:

         cands = self._finder.find_all_candidates(req.project_name)
         skipped_by_requires_python = self._finder.requires_python_skipped_reasons()
-        versions = [str(v) for v in sorted({c.version for c in cands})]

+        versions_set: Set[CandidateVersion] = set()
+        yanked_versions_set: Set[CandidateVersion] = set()
+        for c in cands:
+            is_yanked = c.link.is_yanked if c.link else False
+            if is_yanked:
+                yanked_versions_set.add(c.version)
+            else:
+                versions_set.add(c.version)
+
+        versions = [str(v) for v in sorted(versions_set)]
+        yanked_versions = [str(v) for v in sorted(yanked_versions_set)]
+
+        if yanked_versions:
+            # Saying "version X is yanked" isn't entirely accurate.
+            # https://github.com/pypa/pip/issues/11745#issuecomment-1402805842
+            logger.critical(
+                "Ignored the following yanked versions: %s",
+                ", ".join(yanked_versions) or "none",
+            )
         if skipped_by_requires_python:
             logger.critical(
                 "Ignored the following versions that require a different python "

@@ -28,8 +28,7 @@ from pip._internal.utils.entrypoints import (
 from pip._internal.utils.filesystem import adjacent_tmp_file, check_path_owner, replace
 from pip._internal.utils.misc import ensure_dir

-_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ"
+_WEEK = datetime.timedelta(days=7)

 logger = logging.getLogger(__name__)

@@ -73,12 +72,10 @@ class SelfCheckState:
         if "pypi_version" not in self._state:
             return None

-        seven_days_in_seconds = 7 * 24 * 60 * 60
-
         # Determine if we need to refresh the state
-        last_check = datetime.datetime.strptime(self._state["last_check"], _DATE_FMT)
-        seconds_since_last_check = (current_time - last_check).total_seconds()
-        if seconds_since_last_check > seven_days_in_seconds:
+        last_check = datetime.datetime.fromisoformat(self._state["last_check"])
+        time_since_last_check = current_time - last_check
+        if time_since_last_check > _WEEK:
             return None

         return self._state["pypi_version"]

@@ -100,7 +97,7 @@ class SelfCheckState:
             # Include the key so it's easy to tell which pip wrote the
             # file.
             "key": self.key,
-            "last_check": current_time.strftime(_DATE_FMT),
+            "last_check": current_time.isoformat(),
             "pypi_version": pypi_version,
         }

@@ -229,7 +226,7 @@ def pip_self_version_check(session: PipSession, options: optparse.Values) -> None
     try:
         upgrade_prompt = _self_version_check_logic(
             state=SelfCheckState(cache_dir=options.cache_dir),
-            current_time=datetime.datetime.utcnow(),
+            current_time=datetime.datetime.now(datetime.timezone.utc),
             local_version=installed_dist.version,
             get_remote_version=functools.partial(
                 _get_current_remote_pip_version, session, options

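The switch away from `datetime.datetime.utcnow()` (deprecated in Python 3.12 because it returns a naive datetime) pairs with storing `isoformat()` strings, which round-trip through `fromisoformat()` with the UTC offset intact. A small standalone illustration:

import datetime

aware = datetime.datetime.now(datetime.timezone.utc)
assert aware.tzinfo is not None  # unlike utcnow(), which returns a naive value

stored = aware.isoformat()  # e.g. '2023-09-01T12:00:00+00:00'
restored = datetime.datetime.fromisoformat(stored)
assert restored == aware  # safe to subtract from other aware datetimes
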
@@ -11,9 +11,11 @@ import stat
 import sys
 import sysconfig
 import urllib.parse
+from functools import partial
 from io import StringIO
 from itertools import filterfalse, tee, zip_longest
-from types import TracebackType
+from pathlib import Path
+from types import FunctionType, TracebackType
 from typing import (
     Any,
     BinaryIO,

@@ -66,6 +68,8 @@ T = TypeVar("T")
 ExcInfo = Tuple[Type[BaseException], BaseException, TracebackType]
 VersionInfo = Tuple[int, int, int]
 NetlocTuple = Tuple[str, Tuple[Optional[str], Optional[str]]]
+OnExc = Callable[[FunctionType, Path, BaseException], Any]
+OnErr = Callable[[FunctionType, Path, ExcInfo], Any]


 def get_pip_version() -> str:

@@ -123,33 +127,75 @@ def get_prog() -> str:
 # Retry every half second for up to 3 seconds
 # Tenacity raises RetryError by default, explicitly raise the original exception
 @retry(reraise=True, stop=stop_after_delay(3), wait=wait_fixed(0.5))
-def rmtree(dir: str, ignore_errors: bool = False) -> None:
+def rmtree(
+    dir: str,
+    ignore_errors: bool = False,
+    onexc: Optional[OnExc] = None,
+) -> None:
+    if ignore_errors:
+        onexc = _onerror_ignore
+    if onexc is None:
+        onexc = _onerror_reraise
+    handler: OnErr = partial(
+        # `[func, path, Union[ExcInfo, BaseException]] -> Any` is equivalent to
+        # `Union[([func, path, ExcInfo] -> Any), ([func, path, BaseException] -> Any)]`.
+        cast(Union[OnExc, OnErr], rmtree_errorhandler),
+        onexc=onexc,
+    )
     if sys.version_info >= (3, 12):
-        shutil.rmtree(dir, ignore_errors=ignore_errors, onexc=rmtree_errorhandler)
+        # See https://docs.python.org/3.12/whatsnew/3.12.html#shutil.
+        shutil.rmtree(dir, onexc=handler)
     else:
-        shutil.rmtree(dir, ignore_errors=ignore_errors, onerror=rmtree_errorhandler)
+        shutil.rmtree(dir, onerror=handler)


+def _onerror_ignore(*_args: Any) -> None:
+    pass
+
+
+def _onerror_reraise(*_args: Any) -> None:
+    raise
+
+
 def rmtree_errorhandler(
-    func: Callable[..., Any], path: str, exc_info: Union[ExcInfo, BaseException]
+    func: FunctionType,
+    path: Path,
+    exc_info: Union[ExcInfo, BaseException],
+    *,
+    onexc: OnExc = _onerror_reraise,
 ) -> None:
-    """On Windows, the files in .svn are read-only, so when rmtree() tries to
-    remove them, an exception is thrown. We catch that here, remove the
-    read-only attribute, and hopefully continue without problems."""
+    """
+    `rmtree` error handler to 'force' a file remove (i.e. like `rm -f`).
+
+    * If a file is readonly then its write flag is set and the operation is
+      retried.
+
+    * `onexc` is the original callback from `rmtree(... onexc=onexc)`
+      that is chained at the end if the "rm -f" still fails.
+    """
     try:
-        has_attr_readonly = not (os.stat(path).st_mode & stat.S_IWRITE)
+        st_mode = os.stat(path).st_mode
     except OSError:
         # it's equivalent to os.path.exists
         return

-    if has_attr_readonly:
+    if not st_mode & stat.S_IWRITE:
         # convert to read/write
-        os.chmod(path, stat.S_IWRITE)
-        # use the original function to repeat the operation
-        func(path)
-        return
-    else:
-        raise
+        try:
+            os.chmod(path, st_mode | stat.S_IWRITE)
+        except OSError:
+            pass
+        else:
+            # use the original function to repeat the operation
+            try:
+                func(path)
+                return
+            except OSError:
+                pass
+
+    if not isinstance(exc_info, BaseException):
+        _, exc_info, _ = exc_info
+    onexc(func, path, exc_info)


 def display_path(path: str) -> str:

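For context on the dispatch above: Python 3.12 deprecated `shutil.rmtree`'s `onerror` callback (called with an `exc_info` tuple) in favour of `onexc` (called with the exception itself). A minimal standalone sketch of a handler that works on both sides of that boundary:

import shutil
import sys

def remove_tree(path: str) -> None:
    def handler(func, path, exc):
        # onexc (3.12+) passes the exception; onerror passes an exc_info tuple
        if not isinstance(exc, BaseException):
            _, exc, _ = exc
        print(f"{func.__name__} failed on {path}: {exc}")

    if sys.version_info >= (3, 12):
        shutil.rmtree(path, onexc=handler)
    else:
        shutil.rmtree(path, onerror=handler)
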
@@ -3,8 +3,19 @@ import itertools
 import logging
 import os.path
 import tempfile
+import traceback
 from contextlib import ExitStack, contextmanager
-from typing import Any, Dict, Generator, Optional, TypeVar, Union
+from pathlib import Path
+from types import FunctionType
+from typing import (
+    Any,
+    Dict,
+    Generator,
+    List,
+    Optional,
+    TypeVar,
+    Union,
+)

 from pip._internal.utils.misc import enum, rmtree

@@ -106,6 +117,7 @@ class TempDirectory:
         delete: Union[bool, None, _Default] = _default,
         kind: str = "temp",
         globally_managed: bool = False,
+        ignore_cleanup_errors: bool = True,
     ):
         super().__init__()

@@ -128,6 +140,7 @@ class TempDirectory:
         self._deleted = False
         self.delete = delete
         self.kind = kind
+        self.ignore_cleanup_errors = ignore_cleanup_errors

         if globally_managed:
             assert _tempdir_manager is not None

@@ -170,7 +183,44 @@ class TempDirectory:
         self._deleted = True
         if not os.path.exists(self._path):
             return
-        rmtree(self._path)
+
+        errors: List[BaseException] = []
+
+        def onerror(
+            func: FunctionType,
+            path: Path,
+            exc_val: BaseException,
+        ) -> None:
+            """Log a warning for a `rmtree` error and continue"""
+            formatted_exc = "\n".join(
+                traceback.format_exception_only(type(exc_val), exc_val)
+            )
+            formatted_exc = formatted_exc.rstrip()  # remove trailing new line
+            if func in (os.unlink, os.remove, os.rmdir):
+                logger.debug(
+                    "Failed to remove a temporary file '%s' due to %s.\n",
+                    path,
+                    formatted_exc,
+                )
+            else:
+                logger.debug("%s failed with %s.", func.__qualname__, formatted_exc)
+            errors.append(exc_val)
+
+        if self.ignore_cleanup_errors:
+            try:
+                # first try with tenacity; retrying to handle ephemeral errors
+                rmtree(self._path, ignore_errors=False)
+            except OSError:
+                # last pass ignore/log all errors
+                rmtree(self._path, onexc=onerror)
+            if errors:
+                logger.warning(
+                    "Failed to remove contents in a temporary directory '%s'.\n"
+                    "You can safely remove it manually.",
+                    self._path,
+                )
+        else:
+            rmtree(self._path)


 class AdjacentTempDirectory(TempDirectory):

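In practice the new flag means a temporary directory that cannot be fully removed (say, a file held open on Windows) no longer aborts the operation: pip retries, then logs a warning and moves on. A usage sketch of the pip-internal API shown above (internal, so subject to change):

from pip._internal.utils.temp_dir import TempDirectory

with TempDirectory(kind="example", ignore_cleanup_errors=True) as tmp:
    print(tmp.path)  # scratch area
# on exit: rmtree is retried; leftover errors are logged, not raised
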
@@ -1,4 +1,4 @@
 from .core import contents, where

 __all__ = ["contents", "where"]
-__version__ = "2023.05.07"
+__version__ = "2023.07.22"

@@ -791,34 +791,6 @@ uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2
 XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E=
 -----END CERTIFICATE-----

-# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post
-# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post
-# Label: "Hongkong Post Root CA 1"
-# Serial: 1000
-# MD5 Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca
-# SHA1 Fingerprint: d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58
-# SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2
------BEGIN CERTIFICATE-----
-MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx
-FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg
-Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG
-A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr
-b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
-AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ
-jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn
-PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh
-ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9
-nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h
-q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED
-MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC
-mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3
-7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB
-oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs
-EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO
-fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi
-AmvZWg==
------END CERTIFICATE-----
-
 # Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.
 # Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.
 # Label: "SecureSign RootCA11"

@@ -1676,50 +1648,6 @@ HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx
 SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY=
 -----END CERTIFICATE-----

-# Issuer: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi
-# Subject: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi
-# Label: "E-Tugra Certification Authority"
-# Serial: 7667447206703254355
-# MD5 Fingerprint: b8:a1:03:63:b0:bd:21:71:70:8a:6f:13:3a:bb:79:49
-# SHA1 Fingerprint: 51:c6:e7:08:49:06:6e:f3:92:d4:5c:a0:0d:6d:a3:62:8f:c3:52:39
-# SHA256 Fingerprint: b0:bf:d5:2b:b0:d7:d9:bd:92:bf:5d:4d:c1:3d:a2:55:c0:2c:54:2f:37:83:65:ea:89:39:11:f5:5e:55:f2:3c
------BEGIN CERTIFICATE-----
-MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNV
-BAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBC
-aWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNV
-BAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1
-Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMwNTEyMDk0OFoXDTIz
-MDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+
-BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhp
-em1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBN
-ZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5
-MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4vU/kwVRHoViVF56C/UY
-B4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vdhQd2h8y/L5VMzH2nPbxH
-D5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5KCKpbknSF
-Q9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEo
-q1+gElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3D
-k14opz8n8Y4e0ypQBaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcH
-fC425lAcP9tDJMW/hkd5s3kc91r0E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsut
-dEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gzrt48Ue7LE3wBf4QOXVGUnhMM
-ti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAqjqFGOjGY5RH8
-zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+Lzn
-rFpct1pHXFXOVbQicVtbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUX
-U8u3Zg5mTPj5dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6
-Jyr+zE7S6E5UMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5
-XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAF
-Nzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAKkEh47U6YA5n+KGCR
-HTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jOXKqY
-GwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c
-77NCR807VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3
-+GbHeJAAFS6LrVE1Uweoa2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WK
-vJUawSg5TB9D0pH0clmKuVb8P7Sd2nCcdlqMQ1DujjByTd//SffGqWfZbawCEeI6
-FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEVKV0jq9BgoRJP3vQXzTLl
-yb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gTDx4JnW2P
-AJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpD
-y4Q08ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8d
-NL/+I5c30jn6PQ0GC7TbO6Orb1wdtn7os4I07QZcJA==
------END CERTIFICATE-----
-
 # Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
 # Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
 # Label: "T-TeleSec GlobalRoot Class 2"

@@ -4397,73 +4325,6 @@ ut6Dacpps6kFtZaSF4fC0urQe87YQVt8rgIwRt7qy12a7DLCZRawTDBcMPPaTnOG
BtjOiQRINzf43TNRnXCve1XYAS59BWQOhriR
-----END CERTIFICATE-----

# Issuer: CN=E-Tugra Global Root CA RSA v3 O=E-Tugra EBG A.S. OU=E-Tugra Trust Center
# Subject: CN=E-Tugra Global Root CA RSA v3 O=E-Tugra EBG A.S. OU=E-Tugra Trust Center
# Label: "E-Tugra Global Root CA RSA v3"
# Serial: 75951268308633135324246244059508261641472512052
# MD5 Fingerprint: 22:be:10:f6:c2:f8:03:88:73:5f:33:29:47:28:47:a4
# SHA1 Fingerprint: e9:a8:5d:22:14:52:1c:5b:aa:0a:b4:be:24:6a:23:8a:c9:ba:e2:a9
# SHA256 Fingerprint: ef:66:b0:b1:0a:3c:db:9f:2e:36:48:c7:6b:d2:af:18:ea:d2:bf:e6:f1:17:65:5e:28:c4:06:0d:a1:a3:f4:c2
-----BEGIN CERTIFICATE-----
MIIF8zCCA9ugAwIBAgIUDU3FzRYilZYIfrgLfxUGNPt5EDQwDQYJKoZIhvcNAQEL
BQAwgYAxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHEwZBbmthcmExGTAXBgNVBAoTEEUt
VHVncmEgRUJHIEEuUy4xHTAbBgNVBAsTFEUtVHVncmEgVHJ1c3QgQ2VudGVyMSYw
JAYDVQQDEx1FLVR1Z3JhIEdsb2JhbCBSb290IENBIFJTQSB2MzAeFw0yMDAzMTgw
OTA3MTdaFw00NTAzMTIwOTA3MTdaMIGAMQswCQYDVQQGEwJUUjEPMA0GA1UEBxMG
QW5rYXJhMRkwFwYDVQQKExBFLVR1Z3JhIEVCRyBBLlMuMR0wGwYDVQQLExRFLVR1
Z3JhIFRydXN0IENlbnRlcjEmMCQGA1UEAxMdRS1UdWdyYSBHbG9iYWwgUm9vdCBD
QSBSU0EgdjMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCiZvCJt3J7
7gnJY9LTQ91ew6aEOErxjYG7FL1H6EAX8z3DeEVypi6Q3po61CBxyryfHUuXCscx
uj7X/iWpKo429NEvx7epXTPcMHD4QGxLsqYxYdE0PD0xesevxKenhOGXpOhL9hd8
7jwH7eKKV9y2+/hDJVDqJ4GohryPUkqWOmAalrv9c/SF/YP9f4RtNGx/ardLAQO/
rWm31zLZ9Vdq6YaCPqVmMbMWPcLzJmAy01IesGykNz709a/r4d+ABs8qQedmCeFL
l+d3vSFtKbZnwy1+7dZ5ZdHPOrbRsV5WYVB6Ws5OUDGAA5hH5+QYfERaxqSzO8bG
wzrwbMOLyKSRBfP12baqBqG3q+Sx6iEUXIOk/P+2UNOMEiaZdnDpwA+mdPy70Bt4
znKS4iicvObpCdg604nmvi533wEKb5b25Y08TVJ2Glbhc34XrD2tbKNSEhhw5oBO
M/J+JjKsBY04pOZ2PJ8QaQ5tndLBeSBrW88zjdGUdjXnXVXHt6woq0bM5zshtQoK
5EpZ3IE1S0SVEgpnpaH/WwAH0sDM+T/8nzPyAPiMbIedBi3x7+PmBvrFZhNb/FAH
nnGGstpvdDDPk1Po3CLW3iAfYY2jLqN4MpBs3KwytQXk9TwzDdbgh3cXTJ2w2Amo
DVf3RIXwyAS+XF1a4xeOVGNpf0l0ZAWMowIDAQABo2MwYTAPBgNVHRMBAf8EBTAD
AQH/MB8GA1UdIwQYMBaAFLK0ruYt9ybVqnUtdkvAG1Mh0EjvMB0GA1UdDgQWBBSy
tK7mLfcm1ap1LXZLwBtTIdBI7zAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEL
BQADggIBAImocn+M684uGMQQgC0QDP/7FM0E4BQ8Tpr7nym/Ip5XuYJzEmMmtcyQ
6dIqKe6cLcwsmb5FJ+Sxce3kOJUxQfJ9emN438o2Fi+CiJ+8EUdPdk3ILY7r3y18
Tjvarvbj2l0Upq7ohUSdBm6O++96SmotKygY/r+QLHUWnw/qln0F7psTpURs+APQ
3SPh/QMSEgj0GDSz4DcLdxEBSL9htLX4GdnLTeqjjO/98Aa1bZL0SmFQhO3sSdPk
vmjmLuMxC1QLGpLWgti2omU8ZgT5Vdps+9u1FGZNlIM7zR6mK7L+d0CGq+ffCsn9
9t2HVhjYsCxVYJb6CH5SkPVLpi6HfMsg2wY+oF0Dd32iPBMbKaITVaA9FCKvb7jQ
mhty3QUBjYZgv6Rn7rWlDdF/5horYmbDB7rnoEgcOMPpRfunf/ztAmgayncSd6YA
VSgU7NbHEqIbZULpkejLPoeJVF3Zr52XnGnnCv8PWniLYypMfUeUP95L6VPQMPHF
9p5J3zugkaOj/s1YzOrfr28oO6Bpm4/srK4rVJ2bBLFHIK+WEj5jlB0E5y67hscM
moi/dkfv97ALl2bSRM9gUgfh1SxKOidhd8rXj+eHDjD/DLsE4mHDosiXYY60MGo8
bcIHX0pzLz/5FooBZu+6kcpSV3uu1OYP3Qt6f4ueJiDPO++BcYNZ
-----END CERTIFICATE-----

# Issuer: CN=E-Tugra Global Root CA ECC v3 O=E-Tugra EBG A.S. OU=E-Tugra Trust Center
# Subject: CN=E-Tugra Global Root CA ECC v3 O=E-Tugra EBG A.S. OU=E-Tugra Trust Center
# Label: "E-Tugra Global Root CA ECC v3"
# Serial: 218504919822255052842371958738296604628416471745
# MD5 Fingerprint: 46:bc:81:bb:f1:b5:1e:f7:4b:96:bc:14:e2:e7:27:64
# SHA1 Fingerprint: 8a:2f:af:57:53:b1:b0:e6:a1:04:ec:5b:6a:69:71:6d:f6:1c:e2:84
# SHA256 Fingerprint: 87:3f:46:85:fa:7f:56:36:25:25:2e:6d:36:bc:d7:f1:6f:c2:49:51:f2:64:e4:7e:1b:95:4f:49:08:cd:ca:13
-----BEGIN CERTIFICATE-----
MIICpTCCAiqgAwIBAgIUJkYZdzHhT28oNt45UYbm1JeIIsEwCgYIKoZIzj0EAwMw
gYAxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHEwZBbmthcmExGTAXBgNVBAoTEEUtVHVn
cmEgRUJHIEEuUy4xHTAbBgNVBAsTFEUtVHVncmEgVHJ1c3QgQ2VudGVyMSYwJAYD
VQQDEx1FLVR1Z3JhIEdsb2JhbCBSb290IENBIEVDQyB2MzAeFw0yMDAzMTgwOTQ2
NThaFw00NTAzMTIwOTQ2NThaMIGAMQswCQYDVQQGEwJUUjEPMA0GA1UEBxMGQW5r
YXJhMRkwFwYDVQQKExBFLVR1Z3JhIEVCRyBBLlMuMR0wGwYDVQQLExRFLVR1Z3Jh
IFRydXN0IENlbnRlcjEmMCQGA1UEAxMdRS1UdWdyYSBHbG9iYWwgUm9vdCBDQSBF
Q0MgdjMwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASOmCm/xxAeJ9urA8woLNheSBkQ
KczLWYHMjLiSF4mDKpL2w6QdTGLVn9agRtwcvHbB40fQWxPa56WzZkjnIZpKT4YK
fWzqTTKACrJ6CZtpS5iB4i7sAnCWH/31Rs7K3IKjYzBhMA8GA1UdEwEB/wQFMAMB
Af8wHwYDVR0jBBgwFoAU/4Ixcj75xGZsrTie0bBRiKWQzPUwHQYDVR0OBBYEFP+C
MXI++cRmbK04ntGwUYilkMz1MA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNp
ADBmAjEA5gVYaWHlLcoNy/EZCL3W/VGSGn5jVASQkZo1kTmZ+gepZpO6yGjUij/6
7W4WAie3AjEA3VoXK3YdZUKWpqxdinlW2Iob35reX8dQj7FbcQwm32pAAOwzkSFx
vmjkI6TZraE3
-----END CERTIFICATE-----

# Issuer: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD.
# Subject: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD.
# Label: "Security Communication RootCA3"
@@ -4587,3 +4448,188 @@ AgEGMAoGCCqGSM49BAMDA2gAMGUCMBq8W9f+qdJUDkpd0m2xQNz0Q9XSSpkZElaA
94M04TVOSG0ED1cxMDAtsaqdAzjbBgIxAMvMh1PLet8gUXOQwKhbYdDFUDn9hf7B
43j4ptZLvZuHjw/l1lOWqzzIQNph91Oj9w==
-----END CERTIFICATE-----

# Issuer: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited
# Subject: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited
# Label: "Sectigo Public Server Authentication Root E46"
# Serial: 88989738453351742415770396670917916916
# MD5 Fingerprint: 28:23:f8:b2:98:5c:37:16:3b:3e:46:13:4e:b0:b3:01
# SHA1 Fingerprint: ec:8a:39:6c:40:f0:2e:bc:42:75:d4:9f:ab:1c:1a:5b:67:be:d2:9a
# SHA256 Fingerprint: c9:0f:26:f0:fb:1b:40:18:b2:22:27:51:9b:5c:a2:b5:3e:2c:a5:b3:be:5c:f1:8e:fe:1b:ef:47:38:0c:53:83
-----BEGIN CERTIFICATE-----
MIICOjCCAcGgAwIBAgIQQvLM2htpN0RfFf51KBC49DAKBggqhkjOPQQDAzBfMQsw
CQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1T
ZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwHhcN
MjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEYMBYG
A1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1YmxpYyBT
ZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA
IgNiAAR2+pmpbiDt+dd34wc7qNs9Xzjoq1WmVk/WSOrsfy2qw7LFeeyZYX8QeccC
WvkEN/U0NSt3zn8gj1KjAIns1aeibVvjS5KToID1AZTc8GgHHs3u/iVStSBDHBv+
6xnOQ6OjQjBAMB0GA1UdDgQWBBTRItpMWfFLXyY4qp3W7usNw/upYTAOBgNVHQ8B
Af8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNnADBkAjAn7qRa
qCG76UeXlImldCBteU/IvZNeWBj7LRoAasm4PdCkT0RHlAFWovgzJQxC36oCMB3q
4S6ILuH5px0CMk7yn2xVdOOurvulGu7t0vzCAxHrRVxgED1cf5kDW21USAGKcw==
-----END CERTIFICATE-----

# Issuer: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited
# Subject: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited
# Label: "Sectigo Public Server Authentication Root R46"
# Serial: 156256931880233212765902055439220583700
# MD5 Fingerprint: 32:10:09:52:00:d5:7e:6c:43:df:15:c0:b1:16:93:e5
# SHA1 Fingerprint: ad:98:f9:f3:e4:7d:75:3b:65:d4:82:b3:a4:52:17:bb:6e:f5:e4:38
# SHA256 Fingerprint: 7b:b6:47:a6:2a:ee:ac:88:bf:25:7a:a5:22:d0:1f:fe:a3:95:e0:ab:45:c7:3f:93:f6:56:54:ec:38:f2:5a:06
-----BEGIN CERTIFICATE-----
MIIFijCCA3KgAwIBAgIQdY39i658BwD6qSWn4cetFDANBgkqhkiG9w0BAQwFADBf
MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQD
Ey1TZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYw
HhcNMjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEY
MBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1Ymxp
YyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB
AQUAA4ICDwAwggIKAoICAQCTvtU2UnXYASOgHEdCSe5jtrch/cSV1UgrJnwUUxDa
ef0rty2k1Cz66jLdScK5vQ9IPXtamFSvnl0xdE8H/FAh3aTPaE8bEmNtJZlMKpnz
SDBh+oF8HqcIStw+KxwfGExxqjWMrfhu6DtK2eWUAtaJhBOqbchPM8xQljeSM9xf
iOefVNlI8JhD1mb9nxc4Q8UBUQvX4yMPFF1bFOdLvt30yNoDN9HWOaEhUTCDsG3X
ME6WW5HwcCSrv0WBZEMNvSE6Lzzpng3LILVCJ8zab5vuZDCQOc2TZYEhMbUjUDM3
IuM47fgxMMxF/mL50V0yeUKH32rMVhlATc6qu/m1dkmU8Sf4kaWD5QazYw6A3OAS
VYCmO2a0OYctyPDQ0RTp5A1NDvZdV3LFOxxHVp3i1fuBYYzMTYCQNFu31xR13NgE
SJ/AwSiItOkcyqex8Va3e0lMWeUgFaiEAin6OJRpmkkGj80feRQXEgyDet4fsZfu
+Zd4KKTIRJLpfSYFplhym3kT2BFfrsU4YjRosoYwjviQYZ4ybPUHNs2iTG7sijbt
8uaZFURww3y8nDnAtOFr94MlI1fZEoDlSfB1D++N6xybVCi0ITz8fAr/73trdf+L
HaAZBav6+CuBQug4urv7qv094PPK306Xlynt8xhW6aWWrL3DkJiy4Pmi1KZHQ3xt
zwIDAQABo0IwQDAdBgNVHQ4EFgQUVnNYZJX5khqwEioEYnmhQBWIIUkwDgYDVR0P
AQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAC9c
mTz8Bl6MlC5w6tIyMY208FHVvArzZJ8HXtXBc2hkeqK5Duj5XYUtqDdFqij0lgVQ
YKlJfp/imTYpE0RHap1VIDzYm/EDMrraQKFz6oOht0SmDpkBm+S8f74TlH7Kph52
gDY9hAaLMyZlbcp+nv4fjFg4exqDsQ+8FxG75gbMY/qB8oFM2gsQa6H61SilzwZA
Fv97fRheORKkU55+MkIQpiGRqRxOF3yEvJ+M0ejf5lG5Nkc/kLnHvALcWxxPDkjB
JYOcCj+esQMzEhonrPcibCTRAUH4WAP+JWgiH5paPHxsnnVI84HxZmduTILA7rpX
DhjvLpr3Etiga+kFpaHpaPi8TD8SHkXoUsCjvxInebnMMTzD9joiFgOgyY9mpFui
TdaBJQbpdqQACj7LzTWb4OE4y2BThihCQRxEV+ioratF4yUQvNs+ZUH7G6aXD+u5
dHn5HrwdVw1Hr8Mvn4dGp+smWg9WY7ViYG4A++MnESLn/pmPNPW56MORcr3Ywx65
LvKRRFHQV80MNNVIIb/bE/FmJUNS0nAiNs2fxBx1IK1jcmMGDw4nztJqDby1ORrp
0XZ60Vzk50lJLVU3aPAaOpg+VBeHVOmmJ1CJeyAvP/+/oYtKR5j/K3tJPsMpRmAY
QqszKbrAKbkTidOIijlBO8n9pu0f9GBj39ItVQGL
-----END CERTIFICATE-----

# Issuer: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation
# Subject: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation
# Label: "SSL.com TLS RSA Root CA 2022"
# Serial: 148535279242832292258835760425842727825
# MD5 Fingerprint: d8:4e:c6:59:30:d8:fe:a0:d6:7a:5a:2c:2c:69:78:da
# SHA1 Fingerprint: ec:2c:83:40:72:af:26:95:10:ff:0e:f2:03:ee:31:70:f6:78:9d:ca
# SHA256 Fingerprint: 8f:af:7d:2e:2c:b4:70:9b:b8:e0:b3:36:66:bf:75:a5:dd:45:b5:de:48:0f:8e:a8:d4:bf:e6:be:bc:17:f2:ed
-----BEGIN CERTIFICATE-----
MIIFiTCCA3GgAwIBAgIQb77arXO9CEDii02+1PdbkTANBgkqhkiG9w0BAQsFADBO
MQswCQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQD
DBxTU0wuY29tIFRMUyBSU0EgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzQyMloX
DTQ2MDgxOTE2MzQyMVowTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jw
b3JhdGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgUlNBIFJvb3QgQ0EgMjAyMjCC
AiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANCkCXJPQIgSYT41I57u9nTP
L3tYPc48DRAokC+X94xI2KDYJbFMsBFMF3NQ0CJKY7uB0ylu1bUJPiYYf7ISf5OY
t6/wNr/y7hienDtSxUcZXXTzZGbVXcdotL8bHAajvI9AI7YexoS9UcQbOcGV0ins
S657Lb85/bRi3pZ7QcacoOAGcvvwB5cJOYF0r/c0WRFXCsJbwST0MXMwgsadugL3
PnxEX4MN8/HdIGkWCVDi1FW24IBydm5MR7d1VVm0U3TZlMZBrViKMWYPHqIbKUBO
L9975hYsLfy/7PO0+r4Y9ptJ1O4Fbtk085zx7AGL0SDGD6C1vBdOSHtRwvzpXGk3
R2azaPgVKPC506QVzFpPulJwoxJF3ca6TvvC0PeoUidtbnm1jPx7jMEWTO6Af77w
dr5BUxIzrlo4QqvXDz5BjXYHMtWrifZOZ9mxQnUjbvPNQrL8VfVThxc7wDNY8VLS
+YCk8OjwO4s4zKTGkH8PnP2L0aPP2oOnaclQNtVcBdIKQXTbYxE3waWglksejBYS
d66UNHsef8JmAOSqg+qKkK3ONkRN0VHpvB/zagX9wHQfJRlAUW7qglFA35u5CCoG
AtUjHBPW6dvbxrB6y3snm/vg1UYk7RBLY0ulBY+6uB0rpvqR4pJSvezrZ5dtmi2f
gTIFZzL7SAg/2SW4BCUvAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0j
BBgwFoAU+y437uOEeicuzRk1sTN8/9REQrkwHQYDVR0OBBYEFPsuN+7jhHonLs0Z
NbEzfP/UREK5MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAjYlt
hEUY8U+zoO9opMAdrDC8Z2awms22qyIZZtM7QbUQnRC6cm4pJCAcAZli05bg4vsM
QtfhWsSWTVTNj8pDU/0quOr4ZcoBwq1gaAafORpR2eCNJvkLTqVTJXojpBzOCBvf
R4iyrT7gJ4eLSYwfqUdYe5byiB0YrrPRpgqU+tvT5TgKa3kSM/tKWTcWQA673vWJ
DPFs0/dRa1419dvAJuoSc06pkZCmF8NsLzjUo3KUQyxi4U5cMj29TH0ZR6LDSeeW
P4+a0zvkEdiLA9z2tmBVGKaBUfPhqBVq6+AL8BQx1rmMRTqoENjwuSfr98t67wVy
lrXEj5ZzxOhWc5y8aVFjvO9nHEMaX3cZHxj4HCUp+UmZKbaSPaKDN7EgkaibMOlq
bLQjk2UEqxHzDh1TJElTHaE/nUiSEeJ9DU/1172iWD54nR4fK/4huxoTtrEoZP2w
AgDHbICivRZQIA9ygV/MlP+7mea6kMvq+cYMwq7FGc4zoWtcu358NFcXrfA/rs3q
r5nsLFR+jM4uElZI7xc7P0peYNLcdDa8pUNjyw9bowJWCZ4kLOGGgYz+qxcs+sji
Mho6/4UIyYOf8kpIEFR3N+2ivEC+5BB09+Rbu7nzifmPQdjH5FCQNYA+HLhNkNPU
98OwoX6EyneSMSy4kLGCenROmxMmtNVQZlR4rmA=
-----END CERTIFICATE-----

# Issuer: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation
# Subject: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation
# Label: "SSL.com TLS ECC Root CA 2022"
# Serial: 26605119622390491762507526719404364228
# MD5 Fingerprint: 99:d7:5c:f1:51:36:cc:e9:ce:d9:19:2e:77:71:56:c5
# SHA1 Fingerprint: 9f:5f:d9:1a:54:6d:f5:0c:71:f0:ee:7a:bd:17:49:98:84:73:e2:39
# SHA256 Fingerprint: c3:2f:fd:9f:46:f9:36:d1:6c:36:73:99:09:59:43:4b:9a:d6:0a:af:bb:9e:7c:f3:36:54:f1:44:cc:1b:a1:43
-----BEGIN CERTIFICATE-----
MIICOjCCAcCgAwIBAgIQFAP1q/s3ixdAW+JDsqXRxDAKBggqhkjOPQQDAzBOMQsw
CQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQDDBxT
U0wuY29tIFRMUyBFQ0MgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzM0OFoXDTQ2
MDgxOTE2MzM0N1owTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jwb3Jh
dGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgRUNDIFJvb3QgQ0EgMjAyMjB2MBAG
ByqGSM49AgEGBSuBBAAiA2IABEUpNXP6wrgjzhR9qLFNoFs27iosU8NgCTWyJGYm
acCzldZdkkAZDsalE3D07xJRKF3nzL35PIXBz5SQySvOkkJYWWf9lCcQZIxPBLFN
SeR7T5v15wj4A4j3p8OSSxlUgaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSME
GDAWgBSJjy+j6CugFFR781a4Jl9nOAuc0DAdBgNVHQ4EFgQUiY8vo+groBRUe/NW
uCZfZzgLnNAwDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMDA2gAMGUCMFXjIlbp
15IkWE8elDIPDAI2wv2sdDJO4fscgIijzPvX6yv/N33w7deedWo1dlJF4AIxAMeN
b0Igj762TVntd00pxCAgRWSGOlDGxK0tk/UYfXLtqc/ErFc2KAhl3zx5Zn6g6g==
-----END CERTIFICATE-----

# Issuer: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos
# Subject: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos
# Label: "Atos TrustedRoot Root CA ECC TLS 2021"
# Serial: 81873346711060652204712539181482831616
# MD5 Fingerprint: 16:9f:ad:f1:70:ad:79:d6:ed:29:b4:d1:c5:79:70:a8
# SHA1 Fingerprint: 9e:bc:75:10:42:b3:02:f3:81:f4:f7:30:62:d4:8f:c3:a7:51:b2:dd
# SHA256 Fingerprint: b2:fa:e5:3e:14:cc:d7:ab:92:12:06:47:01:ae:27:9c:1d:89:88:fa:cb:77:5f:a8:a0:08:91:4e:66:39:88:a8
-----BEGIN CERTIFICATE-----
MIICFTCCAZugAwIBAgIQPZg7pmY9kGP3fiZXOATvADAKBggqhkjOPQQDAzBMMS4w
LAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgRUNDIFRMUyAyMDIxMQ0w
CwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTI2MjNaFw00MTA0
MTcwOTI2MjJaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBDQSBF
Q0MgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMHYwEAYHKoZI
zj0CAQYFK4EEACIDYgAEloZYKDcKZ9Cg3iQZGeHkBQcfl+3oZIK59sRxUM6KDP/X
tXa7oWyTbIOiaG6l2b4siJVBzV3dscqDY4PMwL502eCdpO5KTlbgmClBk1IQ1SQ4
AjJn8ZQSb+/Xxd4u/RmAo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBR2
KCXWfeBmmnoJsmo7jjPXNtNPojAOBgNVHQ8BAf8EBAMCAYYwCgYIKoZIzj0EAwMD
aAAwZQIwW5kp85wxtolrbNa9d+F851F+uDrNozZffPc8dz7kUK2o59JZDCaOMDtu
CCrCp1rIAjEAmeMM56PDr9NJLkaCI2ZdyQAUEv049OGYa3cpetskz2VAv9LcjBHo
9H1/IISpQuQo
-----END CERTIFICATE-----

# Issuer: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos
# Subject: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos
# Label: "Atos TrustedRoot Root CA RSA TLS 2021"
# Serial: 111436099570196163832749341232207667876
# MD5 Fingerprint: d4:d3:46:b8:9a:c0:9c:76:5d:9e:3a:c3:b9:99:31:d2
# SHA1 Fingerprint: 18:52:3b:0d:06:37:e4:d6:3a:df:23:e4:98:fb:5b:16:fb:86:74:48
# SHA256 Fingerprint: 81:a9:08:8e:a5:9f:b3:64:c5:48:a6:f8:55:59:09:9b:6f:04:05:ef:bf:18:e5:32:4e:c9:f4:57:ba:00:11:2f
-----BEGIN CERTIFICATE-----
MIIFZDCCA0ygAwIBAgIQU9XP5hmTC/srBRLYwiqipDANBgkqhkiG9w0BAQwFADBM
MS4wLAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgUlNBIFRMUyAyMDIx
MQ0wCwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTIxMTBaFw00
MTA0MTcwOTIxMDlaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBD
QSBSU0EgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMIICIjAN
BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAtoAOxHm9BYx9sKOdTSJNy/BBl01Z
4NH+VoyX8te9j2y3I49f1cTYQcvyAh5x5en2XssIKl4w8i1mx4QbZFc4nXUtVsYv
Ye+W/CBGvevUez8/fEc4BKkbqlLfEzfTFRVOvV98r61jx3ncCHvVoOX3W3WsgFWZ
kmGbzSoXfduP9LVq6hdKZChmFSlsAvFr1bqjM9xaZ6cF4r9lthawEO3NUDPJcFDs
GY6wx/J0W2tExn2WuZgIWWbeKQGb9Cpt0xU6kGpn8bRrZtkh68rZYnxGEFzedUln
nkL5/nWpo63/dgpnQOPF943HhZpZnmKaau1Fh5hnstVKPNe0OwANwI8f4UDErmwh
3El+fsqyjW22v5MvoVw+j8rtgI5Y4dtXz4U2OLJxpAmMkokIiEjxQGMYsluMWuPD
0xeqqxmjLBvk1cbiZnrXghmmOxYsL3GHX0WelXOTwkKBIROW1527k2gV+p2kHYzy
geBYBr3JtuP2iV2J+axEoctr+hbxx1A9JNr3w+SH1VbxT5Aw+kUJWdo0zuATHAR8
ANSbhqRAvNncTFd+rrcztl524WWLZt+NyteYr842mIycg5kDcPOvdO3GDjbnvezB
c6eUWsuSZIKmAMFwoW4sKeFYV+xafJlrJaSQOoD0IJ2azsct+bJLKZWD6TWNp0lI
pw9MGZHQ9b8Q4HECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU
dEmZ0f+0emhFdcN+tNzMzjkz2ggwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB
DAUAA4ICAQAjQ1MkYlxt/T7Cz1UAbMVWiLkO3TriJQ2VSpfKgInuKs1l+NsW4AmS
4BjHeJi78+xCUvuppILXTdiK/ORO/auQxDh1MoSf/7OwKwIzNsAQkG8dnK/haZPs
o0UvFJ/1TCplQ3IM98P4lYsU84UgYt1UU90s3BiVaU+DR3BAM1h3Egyi61IxHkzJ
qM7F78PRreBrAwA0JrRUITWXAdxfG/F851X6LWh3e9NpzNMOa7pNdkTWwhWaJuyw
xfW70Xp0wmzNxbVe9kzmWy2B27O3Opee7c9GslA9hGCZcbUztVdF5kJHdWoOsAgM
rr3e97sPWD2PAzHoPYJQyi9eDF20l74gNAf0xBLh7tew2VktafcxBPTy+av5EzH4
AXcOPUIjJsyacmdRIXrMPIWo6iFqO9taPKU0nprALN+AnCng33eU0aKAQv9qTFsR
0PXNor6uzFFcw9VUewyu1rkGd4Di7wcaaMxZUa1+XGdrudviB0JbuAEFWDlN5LuY
o7Ey7Nmj1m+UI/87tyll5gfp77YZ6ufCOB0yiJA8EytuzO+rdwY0d4RPcuSBhPm5
dDTedk+SKlOxJTnbPP/lPqYO5Wue/9vsL3SD3460s6neFE3/MaNFcyT6lSnMEpcE
oji2jbDwN/zIIX8/syQbPYtuzE2wFg2WHYMfRsCbvUOZ58SWLs5fyQ==
-----END CERTIFICATE-----

@@ -8,7 +8,7 @@ platformdirs==3.8.1
pyparsing==3.1.0
pyproject-hooks==1.0.0
requests==2.31.0
    certifi==2023.5.7
    certifi==2023.7.22
    chardet==5.1.0
    idna==3.4
    urllib3==1.26.16

@@ -1,22 +1,32 @@
import compileall
import fnmatch
import http.server
import io
import os
import re
import shutil
import subprocess
import sys
import threading
from contextlib import ExitStack, contextmanager
from dataclasses import dataclass
from enum import Enum
from hashlib import sha256
from pathlib import Path
from textwrap import dedent
from typing import (
    TYPE_CHECKING,
    Any,
    AnyStr,
    Callable,
    ClassVar,
    Dict,
    Iterable,
    Iterator,
    List,
    Optional,
    Set,
    Tuple,
    Union,
)
from unittest.mock import patch
@@ -49,7 +59,7 @@ from .lib.compat import nullcontext
if TYPE_CHECKING:
    from typing import Protocol

    from wsgi import WSGIApplication
    from _typeshed.wsgi import WSGIApplication
else:
    # TODO: Protocol was introduced in Python 3.8. Remove this branch when
    # dropping support for Python 3.7.
@@ -645,7 +655,12 @@ class InMemoryPip:
        try:
            returncode = pip_entry_point([os.fspath(a) for a in args])
        except SystemExit as e:
            returncode = e.code or 0
            if isinstance(e.code, int):
                returncode = e.code
            elif e.code:
                returncode = 1
            else:
                returncode = 0
        finally:
            sys.stdout = orig_stdout
        return InMemoryPipResult(returncode, stdout.getvalue())
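The new branch handles all three shapes `SystemExit.code` can take: an `int` is used as-is, a truthy non-int (e.g. the message string from `sys.exit("msg")`) maps to 1, and `None` or another falsy value maps to 0. The old `e.code or 0` silently turned a message string into the return value. A minimal standalone sketch of the same normalization (the helper name is illustrative, not pip API):

def normalize_exit_code(code: object) -> int:
    # Mirror the branching in the hunk above.
    if isinstance(code, int):
        return code  # includes 0
    if code:
        return 1  # e.g. sys.exit("error message")
    return 0  # None or other falsy values


assert normalize_exit_code(None) == 0
assert normalize_exit_code(3) == 3
assert normalize_exit_code("boom") == 1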
@@ -745,3 +760,289 @@ def proxy(request: pytest.FixtureRequest) -> str:
@pytest.fixture
def enable_user_site(virtualenv: VirtualEnvironment) -> None:
    virtualenv.user_site_packages = True


class MetadataKind(Enum):
    """All the types of values we might be provided for the data-dist-info-metadata
    attribute from PEP 658."""

    # Valid: will read metadata from the dist instead.
    No = "none"
    # Valid: will read the .metadata file, but won't check its hash.
    Unhashed = "unhashed"
    # Valid: will read the .metadata file and check its hash matches.
    Sha256 = "sha256"
    # Invalid: will error out after checking the hash.
    WrongHash = "wrong-hash"
    # Invalid: will error out after failing to fetch the .metadata file.
    NoFile = "no-file"
@dataclass(frozen=True)
class FakePackage:
    """Mock package structure used to generate a PyPI repository.

    FakePackage name and version should correspond to sdists (.tar.gz files) in our test
    data."""

    name: str
    version: str
    filename: str
    metadata: MetadataKind
    # This will override any dependencies specified in the actual dist's METADATA.
    requires_dist: Tuple[str, ...] = ()
    # This will override the Name specified in the actual dist's METADATA.
    metadata_name: Optional[str] = None

    def metadata_filename(self) -> str:
        """This is specified by PEP 658."""
        return f"{self.filename}.metadata"

    def generate_additional_tag(self) -> str:
        """This gets injected into the <a> tag in the generated PyPI index page for this
        package."""
        if self.metadata == MetadataKind.No:
            return ""
        if self.metadata in [MetadataKind.Unhashed, MetadataKind.NoFile]:
            return 'data-dist-info-metadata="true"'
        if self.metadata == MetadataKind.WrongHash:
            return 'data-dist-info-metadata="sha256=WRONG-HASH"'
        assert self.metadata == MetadataKind.Sha256
        checksum = sha256(self.generate_metadata()).hexdigest()
        return f'data-dist-info-metadata="sha256={checksum}"'

    def requires_str(self) -> str:
        if not self.requires_dist:
            return ""
        joined = " and ".join(self.requires_dist)
        return f"Requires-Dist: {joined}"

    def generate_metadata(self) -> bytes:
        """This is written to `self.metadata_filename()` and will override the actual
        dist's METADATA, unless `self.metadata == MetadataKind.NoFile`."""
        return dedent(
            f"""\
            Metadata-Version: 2.1
            Name: {self.metadata_name or self.name}
            Version: {self.version}
            {self.requires_str()}
            """
        ).encode("utf-8")
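For orientation: PEP 658 publishes a dist's core metadata as a sidecar file next to the dist itself, and the Simple index advertises it through a data-dist-info-metadata attribute on the file's anchor tag, optionally carrying a hash. A sketch of the anchor this class ends up emitting for a Sha256 package (the digest placeholder is made up):

# What generate_additional_tag() produces, spliced into the package index:
#   <a href="simple-1.0.tar.gz"
#      data-dist-info-metadata="sha256=<hex digest of simple-1.0.tar.gz.metadata>">
#     simple-1.0.tar.gz
#   </a>
# pip fetches simple-1.0.tar.gz.metadata, verifies the digest, and only then
# trusts it in place of the sdist's own METADATA.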
@pytest.fixture(scope="session")
def fake_packages() -> Dict[str, List[FakePackage]]:
    """The package database we generate for testing PEP 658 support."""
    return {
        "simple": [
            FakePackage("simple", "1.0", "simple-1.0.tar.gz", MetadataKind.Sha256),
            FakePackage("simple", "2.0", "simple-2.0.tar.gz", MetadataKind.No),
            # This will raise a hashing error.
            FakePackage("simple", "3.0", "simple-3.0.tar.gz", MetadataKind.WrongHash),
        ],
        "simple2": [
            # Override the dependencies here in order to force pip to download
            # simple-1.0.tar.gz as well.
            FakePackage(
                "simple2",
                "1.0",
                "simple2-1.0.tar.gz",
                MetadataKind.Unhashed,
                ("simple==1.0",),
            ),
            # This will raise an error when pip attempts to fetch the metadata file.
            FakePackage("simple2", "2.0", "simple2-2.0.tar.gz", MetadataKind.NoFile),
            # This has a METADATA file with a mismatched name.
            FakePackage(
                "simple2",
                "3.0",
                "simple2-3.0.tar.gz",
                MetadataKind.Sha256,
                metadata_name="not-simple2",
            ),
        ],
        "colander": [
            # Ensure we can read the dependencies from a metadata file within a wheel
            # *without* PEP 658 metadata.
            FakePackage(
                "colander",
                "0.9.9",
                "colander-0.9.9-py2.py3-none-any.whl",
                MetadataKind.No,
            ),
        ],
        "compilewheel": [
            # Ensure we can override the dependencies of a wheel file by injecting PEP
            # 658 metadata.
            FakePackage(
                "compilewheel",
                "1.0",
                "compilewheel-1.0-py2.py3-none-any.whl",
                MetadataKind.Unhashed,
                ("simple==1.0",),
            ),
        ],
        "has-script": [
            # Ensure we check PEP 658 metadata hashing errors for wheel files.
            FakePackage(
                "has-script",
                "1.0",
                "has.script-1.0-py2.py3-none-any.whl",
                MetadataKind.WrongHash,
            ),
        ],
        "translationstring": [
            FakePackage(
                "translationstring",
                "1.1",
                "translationstring-1.1.tar.gz",
                MetadataKind.No,
            ),
        ],
        "priority": [
            # Ensure we check for a missing metadata file for wheels.
            FakePackage(
                "priority",
                "1.0",
                "priority-1.0-py2.py3-none-any.whl",
                MetadataKind.NoFile,
            ),
        ],
        "requires-simple-extra": [
            # Metadata name is not canonicalized.
            FakePackage(
                "requires-simple-extra",
                "0.1",
                "requires_simple_extra-0.1-py2.py3-none-any.whl",
                MetadataKind.Sha256,
                metadata_name="Requires_Simple.Extra",
            ),
        ],
    }
@pytest.fixture(scope="session")
def html_index_for_packages(
    shared_data: TestData,
    fake_packages: Dict[str, List[FakePackage]],
    tmpdir_factory: pytest.TempPathFactory,
) -> Path:
    """Generate a PyPI HTML package index within a local directory pointing to
    synthetic test data."""
    html_dir = tmpdir_factory.mktemp("fake_index_html_content")

    # (1) Generate the content for a PyPI index.html.
    pkg_links = "\n".join(
        f'    <a href="{pkg}/index.html">{pkg}</a>' for pkg in fake_packages.keys()
    )
    # Output won't be nicely indented because dedent() acts after f-string
    # arg insertion.
    index_html = dedent(
        f"""\
        <!DOCTYPE html>
        <html>
          <head>
            <meta name="pypi:repository-version" content="1.0">
            <title>Simple index</title>
          </head>
          <body>
            {pkg_links}
          </body>
        </html>"""
    )
    # (2) Generate the index.html in a new subdirectory of the temp directory.
    (html_dir / "index.html").write_text(index_html)

    # (3) Generate subdirectories for individual packages, each with their own
    # index.html.
    for pkg, links in fake_packages.items():
        pkg_subdir = html_dir / pkg
        pkg_subdir.mkdir()

        download_links: List[str] = []
        for package_link in links:
            # (3.1) Generate the <a> tag which pip can crawl pointing to this
            # specific package version.
            download_links.append(
                f'    <a href="{package_link.filename}" {package_link.generate_additional_tag()}>{package_link.filename}</a><br/>'  # noqa: E501
            )
            # (3.2) Copy over the corresponding file in `shared_data.packages`.
            shutil.copy(
                shared_data.packages / package_link.filename,
                pkg_subdir / package_link.filename,
            )
            # (3.3) Write a metadata file, if applicable.
            if package_link.metadata != MetadataKind.NoFile:
                with open(pkg_subdir / package_link.metadata_filename(), "wb") as f:
                    f.write(package_link.generate_metadata())

        # (3.4) After collating all the download links and copying over the files,
        # write an index.html with the generated download links for each
        # copied file for this specific package name.
        download_links_str = "\n".join(download_links)
        pkg_index_content = dedent(
            f"""\
            <!DOCTYPE html>
            <html>
              <head>
                <meta name="pypi:repository-version" content="1.0">
                <title>Links for {pkg}</title>
              </head>
              <body>
                <h1>Links for {pkg}</h1>
                {download_links_str}
              </body>
            </html>"""
        )
        with open(pkg_subdir / "index.html", "w") as f:
            f.write(pkg_index_content)

    return html_dir
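The resulting directory tree (a sketch; the tmpdir prefix varies per run) has the standard Simple-index shape that pip's link collector crawls:

# fake_index_html_content/
# ├── index.html                      # links to each package subdirectory
# └── simple/
#     ├── index.html                  # <a> tags carrying data-dist-info-metadata
#     ├── simple-1.0.tar.gz
#     └── simple-1.0.tar.gz.metadata  # PEP 658 sidecar (absent for NoFile)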
class OneTimeDownloadHandler(http.server.SimpleHTTPRequestHandler):
    """Serve files from the current directory, but error if a file is downloaded more
    than once."""

    _seen_paths: ClassVar[Set[str]] = set()

    def do_GET(self) -> None:
        if self.path in self._seen_paths:
            self.send_error(
                http.HTTPStatus.NOT_FOUND,
                f"File {self.path} not available more than once!",
            )
            return
        super().do_GET()
        if not (self.path.endswith("/") or self.path.endswith(".metadata")):
            self._seen_paths.add(self.path)
@pytest.fixture(scope="function")
def html_index_with_onetime_server(
    html_index_for_packages: Path,
) -> Iterator[http.server.ThreadingHTTPServer]:
    """Serve files from a generated pypi index, erroring if a file is downloaded more
    than once.

    Provide `-i http://localhost:8000` to pip invocations to point them at this server.
    """

    class InDirectoryServer(http.server.ThreadingHTTPServer):
        def finish_request(self, request: Any, client_address: Any) -> None:
            self.RequestHandlerClass(
                request, client_address, self, directory=str(html_index_for_packages)  # type: ignore[call-arg]  # noqa: E501
            )

    class Handler(OneTimeDownloadHandler):
        _seen_paths: ClassVar[Set[str]] = set()

    with InDirectoryServer(("", 8000), Handler) as httpd:
        server_thread = threading.Thread(target=httpd.serve_forever)
        server_thread.start()

        try:
            yield httpd
        finally:
            httpd.shutdown()
            server_thread.join()
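The fixture couples a ThreadingHTTPServer with a serve_forever() worker thread and guarantees shutdown through try/finally. Stripped of the pytest plumbing, the same serve-then-tear-down pattern looks roughly like this (the port and the body of the with block are illustrative):

import http.server
import threading

with http.server.ThreadingHTTPServer(("", 8000), OneTimeDownloadHandler) as httpd:
    thread = threading.Thread(target=httpd.serve_forever)
    thread.start()
    try:
        ...  # point a client at http://localhost:8000 here
    finally:
        httpd.shutdown()  # unblocks serve_forever()
        thread.join()
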
@@ -1,4 +1,5 @@
import os
import sys
from textwrap import dedent
from typing import Optional

@@ -203,6 +204,31 @@ def test_build_env_overlay_prefix_has_priority(script: PipTestEnvironment) -> No
    assert result.stdout.strip() == "2.0", str(result)


if sys.version_info < (3, 12):
    BUILD_ENV_ERROR_DEBUG_CODE = r"""
        from distutils.sysconfig import get_python_lib
        print(
            f'imported `pkg` from `{pkg.__file__}`',
            file=sys.stderr)
        print('system sites:\n  ' + '\n  '.join(sorted({
            get_python_lib(plat_specific=0),
            get_python_lib(plat_specific=1),
        })), file=sys.stderr)
"""
else:
    BUILD_ENV_ERROR_DEBUG_CODE = r"""
        from sysconfig import get_paths
        paths = get_paths()
        print(
            f'imported `pkg` from `{pkg.__file__}`',
            file=sys.stderr)
        print('system sites:\n  ' + '\n  '.join(sorted({
            paths['platlib'],
            paths['purelib'],
        })), file=sys.stderr)
"""
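The version split exists because distutils was removed from the standard library in Python 3.12 (PEP 632); sysconfig.get_paths() is the supported replacement for distutils.sysconfig.get_python_lib(). A quick equivalence sketch:

import sys

if sys.version_info < (3, 12):
    from distutils.sysconfig import get_python_lib

    site_dirs = {get_python_lib(plat_specific=0), get_python_lib(plat_specific=1)}
else:
    from sysconfig import get_paths

    paths = get_paths()
    site_dirs = {paths["purelib"], paths["platlib"]}

print(sorted(site_dirs))  # the interpreter's site-packages locations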
@pytest.mark.usefixtures("enable_user_site")
def test_build_env_isolation(script: PipTestEnvironment) -> None:
    # Create dummy `pkg` wheel.
@@ -231,8 +257,7 @@ def test_build_env_isolation(script: PipTestEnvironment) -> None:
    run_with_build_env(
        script,
        "",
        r"""
from distutils.sysconfig import get_python_lib
        f"""
import sys

try:
@@ -240,17 +265,9 @@ def test_build_env_isolation(script: PipTestEnvironment) -> None:
except ImportError:
    pass
else:
    print(
        f'imported `pkg` from `{pkg.__file__}`',
        file=sys.stderr)
    print('system sites:\n  ' + '\n  '.join(sorted({
        get_python_lib(plat_specific=0),
        get_python_lib(plat_specific=1),
    })), file=sys.stderr)
    print('sys.path:\n  ' + '\n  '.join(sys.path), file=sys.stderr)
    {BUILD_ENV_ERROR_DEBUG_CODE}
    print('sys.path:\\n  ' + '\\n  '.join(sys.path), file=sys.stderr)
    sys.exit(1)
"""
        f"""
# second check: direct check of exclusion of system site packages
import os

@@ -36,10 +36,7 @@ def http_cache_files(http_cache_dir: str) -> List[str]:
        return []

    filenames = glob(os.path.join(destination, "*"))
    files = []
    for filename in filenames:
        files.append(os.path.join(destination, filename))
    return files
    return [os.path.join(destination, filename) for filename in filenames]


@pytest.fixture
@@ -50,10 +47,7 @@ def wheel_cache_files(wheel_cache_dir: str) -> List[str]:
        return []

    filenames = glob(os.path.join(destination, "*.whl"))
    files = []
    for filename in filenames:
        files.append(os.path.join(destination, filename))
    return files
    return [os.path.join(destination, filename) for filename in filenames]


@pytest.fixture
@@ -107,7 +101,7 @@ def list_matches_wheel(wheel_name: str, result: TestPipResult) -> bool:
    `- foo-1.2.3-py3-none-any.whl `."""
    lines = result.stdout.splitlines()
    expected = f" - {wheel_name}-py3-none-any.whl "
    return any(map(lambda line: line.startswith(expected), lines))
    return any(line.startswith(expected) for line in lines)


def list_matches_wheel_abspath(wheel_name: str, result: TestPipResult) -> bool:
@@ -120,11 +114,9 @@ def list_matches_wheel_abspath(wheel_name: str, result: TestPipResult) -> bool:
    lines = result.stdout.splitlines()
    expected = f"{wheel_name}-py3-none-any.whl"
    return any(
        map(
            lambda line: (
                os.path.basename(line).startswith(expected) and os.path.exists(line)
            ),
            lines,
        (
            (os.path.basename(line).startswith(expected) and os.path.exists(line))
            for line in lines
        )
    )
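Both rewrites in this file trade map() plus a lambda for a generator expression; any() consumes either form lazily and short-circuits on the first hit, so behavior is unchanged while the comprehension reads more directly. A tiny illustration:

lines = ["/tmp/foo-1.0-py3-none-any.whl", "/tmp/notes.txt"]

assert any(map(lambda line: line.endswith(".whl"), lines))  # old style
assert any(line.endswith(".whl") for line in lines)  # new style, equivalent
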
@@ -1,14 +1,11 @@
import http.server
import os
import re
import shutil
import textwrap
import uuid
from dataclasses import dataclass
from enum import Enum
from hashlib import sha256
from pathlib import Path
from textwrap import dedent
from typing import Callable, Dict, List, Optional, Tuple
from typing import Callable, List, Tuple

import pytest

@@ -1237,181 +1234,16 @@ def test_download_use_pep517_propagation(
    assert len(downloads) == 2


class MetadataKind(Enum):
    """All the types of values we might be provided for the data-dist-info-metadata
    attribute from PEP 658."""

    # Valid: will read metadata from the dist instead.
    No = "none"
    # Valid: will read the .metadata file, but won't check its hash.
    Unhashed = "unhashed"
    # Valid: will read the .metadata file and check its hash matches.
    Sha256 = "sha256"
    # Invalid: will error out after checking the hash.
    WrongHash = "wrong-hash"
    # Invalid: will error out after failing to fetch the .metadata file.
    NoFile = "no-file"


@dataclass(frozen=True)
class Package:
    """Mock package structure used to generate a PyPI repository.

    Package name and version should correspond to sdists (.tar.gz files) in our test
    data."""

    name: str
    version: str
    filename: str
    metadata: MetadataKind
    # This will override any dependencies specified in the actual dist's METADATA.
    requires_dist: Tuple[str, ...] = ()
    # This will override the Name specified in the actual dist's METADATA.
    metadata_name: Optional[str] = None

    def metadata_filename(self) -> str:
        """This is specified by PEP 658."""
        return f"{self.filename}.metadata"

    def generate_additional_tag(self) -> str:
        """This gets injected into the <a> tag in the generated PyPI index page for this
        package."""
        if self.metadata == MetadataKind.No:
            return ""
        if self.metadata in [MetadataKind.Unhashed, MetadataKind.NoFile]:
            return 'data-dist-info-metadata="true"'
        if self.metadata == MetadataKind.WrongHash:
            return 'data-dist-info-metadata="sha256=WRONG-HASH"'
        assert self.metadata == MetadataKind.Sha256
        checksum = sha256(self.generate_metadata()).hexdigest()
        return f'data-dist-info-metadata="sha256={checksum}"'

    def requires_str(self) -> str:
        if not self.requires_dist:
            return ""
        joined = " and ".join(self.requires_dist)
        return f"Requires-Dist: {joined}"

    def generate_metadata(self) -> bytes:
        """This is written to `self.metadata_filename()` and will override the actual
        dist's METADATA, unless `self.metadata == MetadataKind.NoFile`."""
        return dedent(
            f"""\
            Metadata-Version: 2.1
            Name: {self.metadata_name or self.name}
            Version: {self.version}
            {self.requires_str()}
            """
        ).encode("utf-8")


@pytest.fixture(scope="function")
def write_index_html_content(tmpdir: Path) -> Callable[[str], Path]:
    """Generate a PyPI package index.html within a temporary local directory."""
    html_dir = tmpdir / "index_html_content"
    html_dir.mkdir()

    def generate_index_html_subdir(index_html: str) -> Path:
        """Create a new subdirectory after a UUID and write an index.html."""
        new_subdir = html_dir / uuid.uuid4().hex
        new_subdir.mkdir()

        with open(new_subdir / "index.html", "w") as f:
            f.write(index_html)

        return new_subdir

    return generate_index_html_subdir


@pytest.fixture(scope="function")
def html_index_for_packages(
    shared_data: TestData,
    write_index_html_content: Callable[[str], Path],
) -> Callable[..., Path]:
    """Generate a PyPI HTML package index within a local directory pointing to
    blank data."""

    def generate_html_index_for_packages(packages: Dict[str, List[Package]]) -> Path:
        """
        Produce a PyPI directory structure pointing to the specified packages.
        """
        # (1) Generate the content for a PyPI index.html.
        pkg_links = "\n".join(
            f'    <a href="{pkg}/index.html">{pkg}</a>' for pkg in packages.keys()
        )
        index_html = f"""\
<!DOCTYPE html>
<html>
  <head>
    <meta name="pypi:repository-version" content="1.0">
    <title>Simple index</title>
  </head>
  <body>
{pkg_links}
  </body>
</html>"""
        # (2) Generate the index.html in a new subdirectory of the temp directory.
        index_html_subdir = write_index_html_content(index_html)

        # (3) Generate subdirectories for individual packages, each with their own
        # index.html.
        for pkg, links in packages.items():
            pkg_subdir = index_html_subdir / pkg
            pkg_subdir.mkdir()

            download_links: List[str] = []
            for package_link in links:
                # (3.1) Generate the <a> tag which pip can crawl pointing to this
                # specific package version.
                download_links.append(
                    f'    <a href="{package_link.filename}" {package_link.generate_additional_tag()}>{package_link.filename}</a><br/>'  # noqa: E501
                )
                # (3.2) Copy over the corresponding file in `shared_data.packages`.
                shutil.copy(
                    shared_data.packages / package_link.filename,
                    pkg_subdir / package_link.filename,
                )
                # (3.3) Write a metadata file, if applicable.
                if package_link.metadata != MetadataKind.NoFile:
                    with open(pkg_subdir / package_link.metadata_filename(), "wb") as f:
                        f.write(package_link.generate_metadata())

            # (3.4) After collating all the download links and copying over the files,
            # write an index.html with the generated download links for each
            # copied file for this specific package name.
            download_links_str = "\n".join(download_links)
            pkg_index_content = f"""\
<!DOCTYPE html>
<html>
  <head>
    <meta name="pypi:repository-version" content="1.0">
    <title>Links for {pkg}</title>
  </head>
  <body>
    <h1>Links for {pkg}</h1>
{download_links_str}
  </body>
</html>"""
            with open(pkg_subdir / "index.html", "w") as f:
                f.write(pkg_index_content)

        return index_html_subdir

    return generate_html_index_for_packages


@pytest.fixture(scope="function")
def download_generated_html_index(
def download_local_html_index(
    script: PipTestEnvironment,
    html_index_for_packages: Callable[[Dict[str, List[Package]]], Path],
    html_index_for_packages: Path,
    tmpdir: Path,
) -> Callable[..., Tuple[TestPipResult, Path]]:
    """Execute `pip download` against a generated PyPI index."""
    download_dir = tmpdir / "download_dir"

    def run_for_generated_index(
        packages: Dict[str, List[Package]],
        args: List[str],
        allow_error: bool = False,
    ) -> Tuple[TestPipResult, Path]:
@@ -1419,13 +1251,12 @@ def download_generated_html_index(
        Produce a PyPI directory structure pointing to the specified packages, then
        execute `pip download -i ...` pointing to our generated index.
        """
        index_dir = html_index_for_packages(packages)
        pip_args = [
            "download",
            "-d",
            str(download_dir),
            "-i",
            path_to_url(str(index_dir)),
            path_to_url(str(html_index_for_packages)),
            *args,
        ]
        result = script.pip(*pip_args, allow_error=allow_error)
@@ -1434,84 +1265,35 @@ def download_generated_html_index(
    return run_for_generated_index


# The package database we generate for testing PEP 658 support.
_simple_packages: Dict[str, List[Package]] = {
    "simple": [
        Package("simple", "1.0", "simple-1.0.tar.gz", MetadataKind.Sha256),
        Package("simple", "2.0", "simple-2.0.tar.gz", MetadataKind.No),
        # This will raise a hashing error.
        Package("simple", "3.0", "simple-3.0.tar.gz", MetadataKind.WrongHash),
    ],
    "simple2": [
        # Override the dependencies here in order to force pip to download
        # simple-1.0.tar.gz as well.
        Package(
            "simple2",
            "1.0",
            "simple2-1.0.tar.gz",
            MetadataKind.Unhashed,
            ("simple==1.0",),
        ),
        # This will raise an error when pip attempts to fetch the metadata file.
        Package("simple2", "2.0", "simple2-2.0.tar.gz", MetadataKind.NoFile),
        # This has a METADATA file with a mismatched name.
        Package(
            "simple2",
            "3.0",
            "simple2-3.0.tar.gz",
            MetadataKind.Sha256,
            metadata_name="not-simple2",
        ),
    ],
    "colander": [
        # Ensure we can read the dependencies from a metadata file within a wheel
        # *without* PEP 658 metadata.
        Package(
            "colander", "0.9.9", "colander-0.9.9-py2.py3-none-any.whl", MetadataKind.No
        ),
    ],
    "compilewheel": [
        # Ensure we can override the dependencies of a wheel file by injecting PEP
        # 658 metadata.
        Package(
            "compilewheel",
            "1.0",
            "compilewheel-1.0-py2.py3-none-any.whl",
            MetadataKind.Unhashed,
            ("simple==1.0",),
        ),
    ],
    "has-script": [
        # Ensure we check PEP 658 metadata hashing errors for wheel files.
        Package(
            "has-script",
            "1.0",
            "has.script-1.0-py2.py3-none-any.whl",
            MetadataKind.WrongHash,
        ),
    ],
    "translationstring": [
        Package(
            "translationstring", "1.1", "translationstring-1.1.tar.gz", MetadataKind.No
        ),
    ],
    "priority": [
        # Ensure we check for a missing metadata file for wheels.
        Package(
            "priority", "1.0", "priority-1.0-py2.py3-none-any.whl", MetadataKind.NoFile
        ),
    ],
    "requires-simple-extra": [
        # Metadata name is not canonicalized.
        Package(
            "requires-simple-extra",
            "0.1",
            "requires_simple_extra-0.1-py2.py3-none-any.whl",
            MetadataKind.Sha256,
            metadata_name="Requires_Simple.Extra",
        ),
    ],
}
@pytest.fixture(scope="function")
def download_server_html_index(
    script: PipTestEnvironment,
    tmpdir: Path,
    html_index_with_onetime_server: http.server.ThreadingHTTPServer,
) -> Callable[..., Tuple[TestPipResult, Path]]:
    """Execute `pip download` against a generated PyPI index."""
    download_dir = tmpdir / "download_dir"

    def run_for_generated_index(
        args: List[str],
        allow_error: bool = False,
    ) -> Tuple[TestPipResult, Path]:
        """
        Produce a PyPI directory structure pointing to the specified packages, then
        execute `pip download -i ...` pointing to our generated index.
        """
        pip_args = [
            "download",
            "-d",
            str(download_dir),
            "-i",
            "http://localhost:8000",
            *args,
        ]
        result = script.pip(*pip_args, allow_error=allow_error)
        return (result, download_dir)

    return run_for_generated_index


@pytest.mark.parametrize(
@@ -1530,19 +1312,69 @@ _simple_packages: Dict[str, List[Package]] = {
    ],
)
def test_download_metadata(
    download_generated_html_index: Callable[..., Tuple[TestPipResult, Path]],
    download_local_html_index: Callable[..., Tuple[TestPipResult, Path]],
    requirement_to_download: str,
    expected_outputs: List[str],
) -> None:
    """Verify that if a data-dist-info-metadata attribute is present, then it is used
    instead of the actual dist's METADATA."""
    _, download_dir = download_generated_html_index(
        _simple_packages,
    _, download_dir = download_local_html_index(
        [requirement_to_download],
    )
    assert sorted(os.listdir(download_dir)) == expected_outputs


@pytest.mark.parametrize(
    "requirement_to_download, expected_outputs, doubled_path",
    [
        (
            "simple2==1.0",
            ["simple-1.0.tar.gz", "simple2-1.0.tar.gz"],
            "/simple2/simple2-1.0.tar.gz",
        ),
        ("simple==2.0", ["simple-2.0.tar.gz"], "/simple/simple-2.0.tar.gz"),
        (
            "colander",
            ["colander-0.9.9-py2.py3-none-any.whl", "translationstring-1.1.tar.gz"],
            "/colander/colander-0.9.9-py2.py3-none-any.whl",
        ),
        (
            "compilewheel",
            [
                "compilewheel-1.0-py2.py3-none-any.whl",
                "simple-1.0.tar.gz",
            ],
            "/compilewheel/compilewheel-1.0-py2.py3-none-any.whl",
        ),
    ],
)
def test_download_metadata_server(
    download_server_html_index: Callable[..., Tuple[TestPipResult, Path]],
    requirement_to_download: str,
    expected_outputs: List[str],
    doubled_path: str,
) -> None:
    """Verify that if a data-dist-info-metadata attribute is present, then it is used
    instead of the actual dist's METADATA.

    Additionally, verify that each dist is downloaded exactly once using a mock server.

    This is a regression test for issue https://github.com/pypa/pip/issues/11847.
    """
    _, download_dir = download_server_html_index(
        [requirement_to_download, "--no-cache-dir"],
    )
    assert sorted(os.listdir(download_dir)) == expected_outputs
    shutil.rmtree(download_dir)
    result, _ = download_server_html_index(
        [requirement_to_download, "--no-cache-dir"],
        allow_error=True,
    )
    assert result.returncode != 0
    expected_msg = f"File {doubled_path} not available more than once!"
    assert expected_msg in result.stderr


@pytest.mark.parametrize(
    "requirement_to_download, real_hash",
    [
@@ -1557,14 +1389,13 @@ def test_download_metadata(
    ],
)
def test_incorrect_metadata_hash(
    download_generated_html_index: Callable[..., Tuple[TestPipResult, Path]],
    download_local_html_index: Callable[..., Tuple[TestPipResult, Path]],
    requirement_to_download: str,
    real_hash: str,
) -> None:
    """Verify that if a hash for data-dist-info-metadata is provided, it must match the
    actual hash of the metadata file."""
    result, _ = download_generated_html_index(
        _simple_packages,
    result, _ = download_local_html_index(
        [requirement_to_download],
        allow_error=True,
    )
@@ -1583,15 +1414,14 @@ def test_incorrect_metadata_hash(
    ],
)
def test_metadata_not_found(
    download_generated_html_index: Callable[..., Tuple[TestPipResult, Path]],
    download_local_html_index: Callable[..., Tuple[TestPipResult, Path]],
    requirement_to_download: str,
    expected_url: str,
) -> None:
    """Verify that if a data-dist-info-metadata attribute is provided, that pip will
    fetch the .metadata file at the location specified by PEP 658, and error
    if unavailable."""
    result, _ = download_generated_html_index(
        _simple_packages,
    result, _ = download_local_html_index(
        [requirement_to_download],
        allow_error=True,
    )
@@ -1604,11 +1434,10 @@


def test_produces_error_for_mismatched_package_name_in_metadata(
    download_generated_html_index: Callable[..., Tuple[TestPipResult, Path]],
    download_local_html_index: Callable[..., Tuple[TestPipResult, Path]],
) -> None:
    """Verify that the package name from the metadata matches the requested package."""
    result, _ = download_generated_html_index(
        _simple_packages,
    result, _ = download_local_html_index(
        ["simple2==3.0"],
        allow_error=True,
    )
@@ -1628,7 +1457,7 @@
    ),
)
def test_canonicalizes_package_name_before_verifying_metadata(
    download_generated_html_index: Callable[..., Tuple[TestPipResult, Path]],
    download_local_html_index: Callable[..., Tuple[TestPipResult, Path]],
    requirement: str,
) -> None:
    """Verify that the package name from the command line and the package's
@@ -1636,8 +1465,7 @@ def test_canonicalizes_package_name_before_verifying_metadata(

    Regression test for https://github.com/pypa/pip/issues/12038
    """
    result, download_dir = download_generated_html_index(
        _simple_packages,
    result, download_dir = download_local_html_index(
        [requirement],
        allow_error=True,
    )

@@ -2,12 +2,14 @@ import fnmatch
import json
import os
import pathlib
import re
from os.path import basename
from typing import Iterable

from pip._vendor.packaging.utils import canonicalize_name
from pytest import mark

from pip._internal.utils.misc import hash_file
from tests.lib import PipTestEnvironment, TestData, TestPipResult

@@ -101,3 +103,36 @@ def test_hash_mismatch(script: PipTestEnvironment, tmp_path: pathlib.Path) -> No
        expect_error=True,
    )
    assert "DO NOT MATCH THE HASHES" in result.stderr


@mark.network
def test_hash_mismatch_existing_download_for_metadata_only_wheel(
    script: PipTestEnvironment, tmp_path: pathlib.Path
) -> None:
    """Metadata-only wheels from PEP 658 or fast-deps check for hash matching in
    a separate code path than when the wheel is downloaded all at once. Make sure we
    still check for hash mismatches."""
    reqs = tmp_path / "requirements.txt"
    reqs.write_text("idna==2.10")
    dl_dir = tmp_path / "downloads"
    dl_dir.mkdir()
    idna_wheel = dl_dir / "idna-2.10-py2.py3-none-any.whl"
    idna_wheel.write_text("asdf")
    result = script.pip(
        "download",
        # Ensure that we have a metadata-only dist for idna.
        "--use-feature=fast-deps",
        "-r",
        str(reqs),
        "-d",
        str(dl_dir),
        allow_stderr_warning=True,
    )
    assert re.search(
        r"WARNING: Previously-downloaded file.*has bad hash", result.stderr
    )
    # This is the correct hash for idna==2.10.
    assert (
        hash_file(str(idna_wheel))[0].hexdigest()
        == "b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"
    )
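pip's hash_file() streams the file through hashlib rather than reading it whole; an equivalent check using only the standard library looks like this (the helper name is illustrative, not pip API):

import hashlib


def sha256_of(path: str) -> str:
    # Stream in chunks, as pip does for downloaded dists.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(8192), b""):
            h.update(chunk)
    return h.hexdigest()

The placeholder file the test writes ("asdf") hashes to something other than the pinned digest, which is exactly what triggers the "has bad hash" warning asserted above.
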
@@ -102,8 +102,8 @@ def test_help_commands_equally_functional(in_memory_pip: InMemoryPip) -> None:
    results = list(map(in_memory_pip.pip, ("help", "--help")))
    results.append(in_memory_pip.pip())

    out = map(lambda x: x.stdout, results)
    ret = map(lambda x: x.returncode, results)
    out = (x.stdout for x in results)
    ret = (x.returncode for x in results)

    msg = '"pip --help" != "pip help" != "pip"'
    assert len(set(out)) == 1, "output of: " + msg

@@ -7,7 +7,7 @@ import sysconfig
import textwrap
from os.path import curdir, join, pardir
from pathlib import Path
from typing import Dict, List, Tuple
from typing import Dict, Iterable, List, Optional, Tuple

import pytest

@@ -20,6 +20,7 @@ from tests.lib import (
    PipTestEnvironment,
    ResolverVariant,
    TestData,
    TestPipResult,
    _create_svn_repo,
    _create_test_package,
    create_basic_wheel_for_package,
@@ -848,14 +849,18 @@ def test_editable_install__local_dir_no_setup_py(
    )


@pytest.mark.skipif(
    sys.version_info >= (3, 12),
    reason="Setuptools<64 does not support Python 3.12+",
)
@pytest.mark.network
def test_editable_install__local_dir_no_setup_py_with_pyproject(
def test_editable_install_legacy__local_dir_no_setup_py_with_pyproject(
    script: PipTestEnvironment,
) -> None:
    """
    Test installing in editable mode from a local directory with no setup.py
    but that does have pyproject.toml with a build backend that does not support
    the build_editable hook.
    Test installing in legacy editable mode from a local directory with no
    setup.py but that does have pyproject.toml with a build backend that does
    not support the build_editable hook.
    """
    local_dir = script.scratch_path.joinpath("temp")
    local_dir.mkdir()
@@ -1383,8 +1388,14 @@ setup(name='pkga', version='0.1')
    _test_install_editable_with_prefix(script, {"setup.py": setup_py})


@pytest.mark.skipif(
    sys.version_info >= (3, 12),
    reason="Setuptools<64 does not support Python 3.12+",
)
@pytest.mark.network
def test_install_editable_with_prefix_setup_cfg(script: PipTestEnvironment) -> None:
def test_install_editable_legacy_with_prefix_setup_cfg(
    script: PipTestEnvironment,
) -> None:
    setup_cfg = """[metadata]
name = pkga
version = 0.1
@@ -2231,6 +2242,33 @@ def test_install_yanked_file_and_print_warning(
    assert "Successfully installed simple-3.0\n" in result.stdout, str(result)


def test_yanked_version_missing_from_availble_versions_error_message(
    script: PipTestEnvironment, data: TestData
) -> None:
    """
    Test yanked version is missing from available versions error message.

    Yanked files are always ignored, unless they are the only file that
    matches a version specifier that "pins" to an exact version (PEP 592).
    """
    result = script.pip(
        "install",
        "simple==",
        "--index-url",
        data.index_url("yanked"),
        expect_error=True,
    )
    # the yanked version (3.0) is filtered out from the output:
    expected_warning = (
        "Could not find a version that satisfies the requirement simple== "
        "(from versions: 1.0, 2.0)"
    )
    assert expected_warning in result.stderr, str(result)
    # and mentioned in a separate warning:
    expected_warning = "Ignored the following yanked versions: 3.0"
    assert expected_warning in result.stderr, str(result)
|
||||
|
||||
def test_error_all_yanked_files_and_no_pin(
|
||||
script: PipTestEnvironment, data: TestData
|
||||
) -> None:
|
||||
|
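
Per PEP 592, installers must ignore yanked files except when the requirement pins an exact version, which is why the candidate list above contains only the non-yanked 1.0 and 2.0. A rough sketch of an "exact pin" check using the packaging library (illustrative only, this is not pip's internal code):

    from packaging.specifiers import SpecifierSet

    def is_exact_pin(spec: SpecifierSet) -> bool:
        # Only "==" without a trailing wildcard (and "===") pin one exact version.
        specs = list(spec)
        if len(specs) != 1:
            return False
        s = specs[0]
        return s.operator in ("==", "===") and not s.version.endswith(".*")

    assert is_exact_pin(SpecifierSet("==3.0"))
    assert not is_exact_pin(SpecifierSet(">=1.0"))
    assert not is_exact_pin(SpecifierSet("==3.*"))
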
@@ -2361,14 +2399,68 @@ def test_install_logs_pip_version_in_debug(
     assert_re_match(pattern, result.stdout)
 
 
-def test_install_dry_run(script: PipTestEnvironment, data: TestData) -> None:
-    """Test that pip install --dry-run logs what it would install."""
-    result = script.pip(
-        "install", "--dry-run", "--find-links", data.find_links, "simple"
+def install_find_links(
+    script: PipTestEnvironment,
+    data: TestData,
+    args: Iterable[str],
+    *,
+    dry_run: bool,
+    target_dir: Optional[Path],
+) -> TestPipResult:
+    return script.pip(
+        "install",
+        *(
+            (
+                "--target",
+                str(target_dir),
+            )
+            if target_dir is not None
+            else ()
+        ),
+        *(("--dry-run",) if dry_run else ()),
+        "--no-index",
+        "--find-links",
+        data.find_links,
+        *args,
     )
+
+
+@pytest.mark.parametrize(
+    "with_target_dir",
+    (True, False),
+)
+def test_install_dry_run_nothing_installed(
+    script: PipTestEnvironment,
+    data: TestData,
+    tmpdir: Path,
+    with_target_dir: bool,
+) -> None:
+    """Test that pip install --dry-run logs what it would install, but doesn't actually
+    install anything."""
+    if with_target_dir:
+        install_dir = tmpdir / "fake-install"
+        install_dir.mkdir()
+    else:
+        install_dir = None
+
+    result = install_find_links(
+        script, data, ["simple"], dry_run=True, target_dir=install_dir
+    )
     assert "Would install simple-3.0" in result.stdout
     assert "Successfully installed" not in result.stdout
+
+    script.assert_not_installed("simple")
+    if with_target_dir:
+        assert not os.listdir(install_dir)
+
+    # Ensure that the same install command would normally have worked if not for
+    # --dry-run.
+    install_find_links(script, data, ["simple"], dry_run=False, target_dir=install_dir)
+    if with_target_dir:
+        assert os.listdir(install_dir)
+    else:
+        script.assert_installed(simple="3.0")
 
 
 @pytest.mark.skipif(
     sys.version_info < (3, 11),
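
install_find_links builds its argument list with conditional tuple splats: `*(("--dry-run",) if dry_run else ())` splices optional flags into the call without mutating a list first. A self-contained sketch of the pattern:

    from typing import List, Optional

    def build_args(dry_run: bool, target_dir: Optional[str] = None) -> List[str]:
        return [
            "install",
            # Each splat contributes its flags only when the condition holds.
            *(("--target", target_dir) if target_dir is not None else ()),
            *(("--dry-run",) if dry_run else ()),
        ]

    assert build_args(dry_run=True) == ["install", "--dry-run"]
    assert build_args(dry_run=False, target_dir="/tmp/t") == ["install", "--target", "/tmp/t"]
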
@@ -2449,6 +2541,40 @@ def test_install_pip_prints_req_chain_local(script: PipTestEnvironment) -> None:
     )
 
 
+def test_install_dist_restriction_without_target(script: PipTestEnvironment) -> None:
+    result = script.pip(
+        "install", "--python-version=3.1", "--only-binary=:all:", expect_error=True
+    )
+    assert (
+        "Can not use any platform or abi specific options unless installing "
+        "via '--target'" in result.stderr
+    ), str(result)
+
+
+def test_install_dist_restriction_dry_run_doesnt_require_target(
+    script: PipTestEnvironment,
+) -> None:
+    create_basic_wheel_for_package(
+        script,
+        "base",
+        "0.1.0",
+    )
+
+    result = script.pip(
+        "install",
+        "--python-version=3.1",
+        "--only-binary=:all:",
+        "--dry-run",
+        "--no-cache-dir",
+        "--no-index",
+        "--find-links",
+        script.scratch_path,
+        "base",
+    )
+
+    assert not result.stderr, str(result)
+
+
 @pytest.mark.network
 def test_install_pip_prints_req_chain_pypi(script: PipTestEnvironment) -> None:
     """
@@ -64,6 +64,59 @@ def test_install_report_dep(
     assert _install_dict(report)["simple"]["requested"] is False
 
 
+def test_yanked_version(
+    script: PipTestEnvironment, data: TestData, tmp_path: Path
+) -> None:
+    """
+    Test is_yanked is True when explicitly requesting a yanked package.
+    Yanked files are always ignored, unless they are the only file that
+    matches a version specifier that "pins" to an exact version (PEP 592).
+    """
+    report_path = tmp_path / "report.json"
+    script.pip(
+        "install",
+        "simple==3.0",
+        "--index-url",
+        data.index_url("yanked"),
+        "--dry-run",
+        "--report",
+        str(report_path),
+        allow_stderr_warning=True,
+    )
+    report = json.loads(report_path.read_text())
+    simple_report = _install_dict(report)["simple"]
+    assert simple_report["requested"] is True
+    assert simple_report["is_direct"] is False
+    assert simple_report["is_yanked"] is True
+    assert simple_report["metadata"]["version"] == "3.0"
+
+
+def test_skipped_yanked_version(
+    script: PipTestEnvironment, data: TestData, tmp_path: Path
+) -> None:
+    """
+    Test is_yanked is False when not explicitly requesting a yanked package.
+    Yanked files are always ignored, unless they are the only file that
+    matches a version specifier that "pins" to an exact version (PEP 592).
+    """
+    report_path = tmp_path / "report.json"
+    script.pip(
+        "install",
+        "simple",
+        "--index-url",
+        data.index_url("yanked"),
+        "--dry-run",
+        "--report",
+        str(report_path),
+    )
+    report = json.loads(report_path.read_text())
+    simple_report = _install_dict(report)["simple"]
+    assert simple_report["requested"] is True
+    assert simple_report["is_direct"] is False
+    assert simple_report["is_yanked"] is False
+    assert simple_report["metadata"]["version"] == "2.0"
+
+
 @pytest.mark.network
 def test_install_report_index(script: PipTestEnvironment, tmp_path: Path) -> None:
     """Test report for sdist obtained from index."""
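
Both new tests drive `pip install --dry-run --report` and then assert on the JSON installation report. A small reader for the fields they check, assuming a report.json produced the same way as in the tests above:

    import json
    from pathlib import Path

    report = json.loads(Path("report.json").read_text())
    for item in report["install"]:
        meta = item["metadata"]
        # is_yanked sits alongside requested/is_direct on each install item.
        print(meta["name"], meta["version"], "yanked" if item.get("is_yanked") else "not yanked")
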
@@ -273,25 +273,19 @@ def test_outdated_flag(script: PipTestEnvironment, data: TestData) -> None:
         "latest_version": "3.0",
         "latest_filetype": "sdist",
     } in json_output
-    assert (
-        dict(
-            name="simplewheel",
-            version="1.0",
-            latest_version="2.0",
-            latest_filetype="wheel",
-        )
-        in json_output
-    )
-    assert (
-        dict(
-            name="pip-test-package",
-            version="0.1",
-            latest_version="0.1.1",
-            latest_filetype="sdist",
-            editable_project_location="<location>",
-        )
-        in json_output
-    )
+    assert {
+        "name": "simplewheel",
+        "version": "1.0",
+        "latest_version": "2.0",
+        "latest_filetype": "wheel",
+    } in json_output
+    assert {
+        "name": "pip-test-package",
+        "version": "0.1",
+        "latest_version": "0.1.1",
+        "latest_filetype": "sdist",
+        "editable_project_location": "<location>",
+    } in json_output
     assert "simple2" not in {p["name"] for p in json_output}
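
The dict(...) to {...} literal rewrites in this hunk (and the similar ones further down) are behavior-preserving; the literal form is also the only one that accepts keys that are not valid Python identifiers:

    assert dict(name="simplewheel", version="1.0") == {"name": "simplewheel", "version": "1.0"}
    package = {"latest-filetype": "wheel"}  # fine as a literal; dict(latest-filetype=...) is a SyntaxError
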
@@ -37,6 +37,10 @@ def test_basic_uninstall(script: PipTestEnvironment) -> None:
     assert_all_changes(result, result2, [script.venv / "build", "cache"])
 
 
+@pytest.mark.skipif(
+    sys.version_info >= (3, 12),
+    reason="distutils is no longer available in Python 3.12+",
+)
 def test_basic_uninstall_distutils(script: PipTestEnvironment) -> None:
     """
     Test basic install and uninstall.
@@ -68,6 +72,10 @@ def test_basic_uninstall_distutils(script: PipTestEnvironment) -> None:
     ) in result.stderr
 
 
+@pytest.mark.skipif(
+    sys.version_info >= (3, 12),
+    reason="Setuptools<64 does not support Python 3.12+",
+)
 @pytest.mark.network
 def test_basic_uninstall_with_scripts(script: PipTestEnvironment) -> None:
     """
@@ -101,6 +109,10 @@ def test_uninstall_invalid_parameter(
     assert expected_message in result.stderr
 
 
+@pytest.mark.skipif(
+    sys.version_info >= (3, 12),
+    reason="Setuptools<64 does not support Python 3.12+",
+)
 @pytest.mark.network
 def test_uninstall_easy_install_after_import(script: PipTestEnvironment) -> None:
     """
@@ -126,6 +138,10 @@ def test_uninstall_easy_install_after_import(script: PipTestEnvironment) -> None
     )
 
 
+@pytest.mark.skipif(
+    sys.version_info >= (3, 12),
+    reason="Setuptools<64 does not support Python 3.12+",
+)
 @pytest.mark.network
 def test_uninstall_trailing_newline(script: PipTestEnvironment) -> None:
     """
@@ -337,6 +353,10 @@ def test_uninstall_console_scripts_uppercase_name(script: PipTestEnvironment) ->
     assert not script_name.exists()
 
 
+@pytest.mark.skipif(
+    sys.version_info >= (3, 12),
+    reason="Setuptools<64 does not support Python 3.12+",
+)
 @pytest.mark.network
 def test_uninstall_easy_installed_console_scripts(script: PipTestEnvironment) -> None:
     """
@@ -740,21 +740,19 @@ class PipTestEnvironment(TestFileEnvironment):
 
     def assert_installed(self, **kwargs: str) -> None:
         ret = self.pip("list", "--format=json")
-        installed = set(
+        installed = {
             (canonicalize_name(val["name"]), val["version"])
             for val in json.loads(ret.stdout)
-        )
-        expected = set((canonicalize_name(k), v) for k, v in kwargs.items())
+        }
+        expected = {(canonicalize_name(k), v) for k, v in kwargs.items()}
         assert expected <= installed, "{!r} not all in {!r}".format(expected, installed)
 
     def assert_not_installed(self, *args: str) -> None:
         ret = self.pip("list", "--format=json")
-        installed = set(
-            canonicalize_name(val["name"]) for val in json.loads(ret.stdout)
-        )
+        installed = {canonicalize_name(val["name"]) for val in json.loads(ret.stdout)}
         # None of the given names should be listed as installed, i.e. their
         # intersection should be empty.
-        expected = set(canonicalize_name(k) for k in args)
+        expected = {canonicalize_name(k) for k in args}
         assert not (expected & installed), "{!r} contained in {!r}".format(
             expected, installed
         )
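
assert_installed and assert_not_installed lean on set operators directly: `<=` checks that every expected pair is present, and `&` checks that no forbidden name is. A tiny demonstration with literal data:

    expected = {("simple", "3.0")}
    installed = {("simple", "3.0"), ("pip", "23.2")}
    assert expected <= installed  # subset: everything expected is present

    forbidden = {"simple2"}
    names = {"simple", "pip"}
    assert not (forbidden & names)  # empty intersection: none are present
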
@@ -797,17 +795,15 @@ def diff_states(
         prefix = prefix.rstrip(os.path.sep) + os.path.sep
         return path.startswith(prefix)
 
-    start_keys = {
-        k for k in start.keys() if not any([prefix_match(k, i) for i in ignore])
-    }
-    end_keys = {k for k in end.keys() if not any([prefix_match(k, i) for i in ignore])}
+    start_keys = {k for k in start if not any(prefix_match(k, i) for i in ignore)}
+    end_keys = {k for k in end if not any(prefix_match(k, i) for i in ignore)}
     deleted = {k: start[k] for k in start_keys.difference(end_keys)}
     created = {k: end[k] for k in end_keys.difference(start_keys)}
     updated = {}
     for k in start_keys.intersection(end_keys):
         if start[k].size != end[k].size:
             updated[k] = end[k]
-    return dict(deleted=deleted, created=created, updated=updated)
+    return {"deleted": deleted, "created": created, "updated": updated}
 
 
 def assert_all_changes(
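
Dropping the square brackets inside any() is more than style: a generator argument lets any() stop at the first match, while a list comprehension evaluates every element before any() even runs. The difference is observable:

    def is_positive(x: int) -> bool:
        print("checked", x)
        return x > 0

    any(is_positive(x) for x in [1, 2, 3])    # prints "checked 1" and stops
    any([is_positive(x) for x in [1, 2, 3]])  # prints all three first
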
@@ -1187,7 +1183,7 @@ def create_basic_wheel_for_package(
 
     # Fix wheel distribution name by replacing runs of non-alphanumeric
     # characters with an underscore _ as per PEP 491
-    name = re.sub(r"[^\w\d.]+", "_", name, re.UNICODE)
+    name = re.sub(r"[^\w\d.]+", "_", name)
     archive_name = f"{name}-{version}-py2.py3-none-any.whl"
     archive_path = script.scratch_path / archive_name
 
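
The removed call is a classic re.sub pitfall: the fourth positional parameter is count, not flags, so re.UNICODE (value 32) was silently acting as a substitution limit rather than a flag. The fix drops the argument entirely, since str patterns are Unicode-aware by default in Python 3. The old behavior, demonstrated:

    import re

    assert re.UNICODE == 32
    # Positionally, re.UNICODE becomes count=32, so only 32 matches are replaced.
    assert re.sub(r"x", "_", "x" * 40, re.UNICODE) == "_" * 32 + "x" * 8
    assert re.sub(r"x", "_", "x" * 40) == "_" * 40
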
@@ -1,4 +1,4 @@
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
 from typing import Tuple
 
 from cryptography import x509
@@ -23,8 +23,8 @@ def make_tls_cert(hostname: str) -> Tuple[x509.Certificate, rsa.RSAPrivateKey]:
         .issuer_name(issuer)
         .public_key(key.public_key())
         .serial_number(x509.random_serial_number())
-        .not_valid_before(datetime.utcnow())
-        .not_valid_after(datetime.utcnow() + timedelta(days=10))
+        .not_valid_before(datetime.now(timezone.utc))
+        .not_valid_after(datetime.now(timezone.utc) + timedelta(days=10))
         .add_extension(
             x509.SubjectAlternativeName([x509.DNSName(hostname)]),
             critical=False,
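
datetime.utcnow() returns a naive datetime (tzinfo is None) and is deprecated as of Python 3.12; datetime.now(timezone.utc) returns a timezone-aware one. The difference in one snippet:

    from datetime import datetime, timezone

    naive = datetime.utcnow()  # deprecated since Python 3.12
    aware = datetime.now(timezone.utc)
    assert naive.tzinfo is None
    assert aware.tzinfo is timezone.utc
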
@@ -124,7 +124,7 @@ class VirtualEnvironment:
             )
         elif self._venv_type == "venv":
             builder = _venv.EnvBuilder()
-            context = builder.ensure_directories(self.location)
+            context = builder.ensure_directories(os.fspath(self.location))
             builder.create_configuration(context)
             builder.setup_python(context)
             self.site.mkdir(parents=True, exist_ok=True)
@@ -129,3 +129,17 @@ def test_dist_found_in_zip(tmp_path: Path) -> None:
     dist = get_environment([location]).get_distribution("pkg")
     assert dist is not None and dist.location is not None
     assert Path(dist.location) == Path(location)
+
+
+@pytest.mark.parametrize(
+    "path",
+    (
+        "/path/to/foo.egg-info".replace("/", os.path.sep),
+        # Tests issue fixed by https://github.com/pypa/pip/pull/2530
+        "/path/to/foo.egg-info/".replace("/", os.path.sep),
+    ),
+)
+def test_trailing_slash_directory_metadata(path: str) -> None:
+    dist = get_directory_distribution(path)
+    assert dist.raw_name == dist.canonical_name == "foo"
+    assert dist.location == "/path/to".replace("/", os.path.sep)
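
The trailing-slash parametrization exists because os.path.dirname treats the two spellings differently, which is the failure mode the linked PR addressed:

    import os.path

    assert os.path.dirname("/path/to/foo.egg-info") == "/path/to"
    assert os.path.dirname("/path/to/foo.egg-info/") == "/path/to/foo.egg-info"

(POSIX separators shown; the test substitutes os.path.sep for Windows.)
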
@@ -21,13 +21,13 @@ class TestIndentingFormatter:
 
     def make_record(self, msg: str, level_name: str) -> logging.LogRecord:
         level_number = getattr(logging, level_name)
-        attrs = dict(
-            msg=msg,
-            created=1547704837.040001 + time.timezone,
-            msecs=40,
-            levelname=level_name,
-            levelno=level_number,
-        )
+        attrs = {
+            "msg": msg,
+            "created": 1547704837.040001 + time.timezone,
+            "msecs": 40,
+            "levelname": level_name,
+            "levelno": level_number,
+        }
         record = logging.makeLogRecord(attrs)
 
         return record
@@ -23,7 +23,6 @@ from pip._internal.exceptions import (
     PreviousBuildDirError,
 )
 from pip._internal.index.package_finder import PackageFinder
-from pip._internal.metadata import select_backend
 from pip._internal.models.direct_url import ArchiveInfo, DirectUrl, DirInfo, VcsInfo
 from pip._internal.models.link import Link
 from pip._internal.network.session import PipSession
@@ -600,22 +599,6 @@ class TestInstallRequirement:
         assert req.link is not None
         assert req.link.url == url
 
-    @pytest.mark.parametrize(
-        "path",
-        (
-            "/path/to/foo.egg-info".replace("/", os.path.sep),
-            # Tests issue fixed by https://github.com/pypa/pip/pull/2530
-            "/path/to/foo.egg-info/".replace("/", os.path.sep),
-        ),
-    )
-    def test_get_dist(self, path: str) -> None:
-        req = install_req_from_line("foo")
-        req.metadata_directory = path
-        dist = req.get_dist()
-        assert isinstance(dist, select_backend().Distribution)
-        assert dist.raw_name == dist.canonical_name == "foo"
-        assert dist.location == "/path/to".replace("/", os.path.sep)
-
     def test_markers(self) -> None:
         for line in (
             # recommended syntax
@@ -59,10 +59,9 @@ def test_uninstallation_paths() -> None:
 
 def test_compressed_listing(tmpdir: Path) -> None:
     def in_tmpdir(paths: List[str]) -> List[str]:
-        li = []
-        for path in paths:
-            li.append(str(os.path.join(tmpdir, path.replace("/", os.path.sep))))
-        return li
+        return [
+            str(os.path.join(tmpdir, path.replace("/", os.path.sep))) for path in paths
+        ]
 
     sample = in_tmpdir(
         [
@@ -40,7 +40,7 @@ def test_pip_self_version_check_calls_underlying_implementation(
 ) -> None:
     # GIVEN
     mock_session = Mock()
-    fake_options = Values(dict(cache_dir=str(tmpdir)))
+    fake_options = Values({"cache_dir": str(tmpdir)})
 
     # WHEN
     self_outdated_check.pip_self_version_check(mock_session, fake_options)
@@ -49,7 +49,9 @@ def test_pip_self_version_check_calls_underlying_implementation(
     mocked_state.assert_called_once_with(cache_dir=str(tmpdir))
     mocked_function.assert_called_once_with(
         state=mocked_state(cache_dir=str(tmpdir)),
-        current_time=datetime.datetime(1970, 1, 2, 11, 0, 0),
+        current_time=datetime.datetime(
+            1970, 1, 2, 11, 0, 0, tzinfo=datetime.timezone.utc
+        ),
         local_version=ANY,
         get_remote_version=ANY,
     )
@@ -167,7 +169,10 @@ class TestSelfCheckState:
 
         # WHEN
         state = self_outdated_check.SelfCheckState(cache_dir=str(cache_dir))
-        state.set("1.0.0", datetime.datetime(2000, 1, 1, 0, 0, 0))
+        state.set(
+            "1.0.0",
+            datetime.datetime(2000, 1, 1, 0, 0, 0, tzinfo=datetime.timezone.utc),
+        )
 
         # THEN
         assert state._statefile_path == os.fspath(expected_path)
@@ -175,6 +180,6 @@ class TestSelfCheckState:
         contents = expected_path.read_text()
         assert json.loads(contents) == {
             "key": sys.prefix,
-            "last_check": "2000-01-01T00:00:00Z",
+            "last_check": "2000-01-01T00:00:00+00:00",
             "pypi_version": "1.0.0",
         }
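
The expected timestamp changes because, as the test now exercises it, the state file stores datetime.isoformat() of an aware UTC datetime, which spells the offset as "+00:00" rather than a hand-written "Z" suffix:

    from datetime import datetime, timezone

    dt = datetime(2000, 1, 1, tzinfo=timezone.utc)
    assert dt.isoformat() == "2000-01-01T00:00:00+00:00"
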
@@ -54,18 +54,18 @@ class TestTargetPython:
         "kwargs, expected",
         [
             ({}, ""),
-            (dict(py_version_info=(3, 6)), "version_info='3.6'"),
+            ({"py_version_info": (3, 6)}, "version_info='3.6'"),
             (
-                dict(platforms=["darwin"], py_version_info=(3, 6)),
+                {"platforms": ["darwin"], "py_version_info": (3, 6)},
                 "platforms=['darwin'] version_info='3.6'",
             ),
             (
-                dict(
-                    platforms=["darwin"],
-                    py_version_info=(3, 6),
-                    abis=["cp36m"],
-                    implementation="cp",
-                ),
+                {
+                    "platforms": ["darwin"],
+                    "py_version_info": (3, 6),
+                    "abis": ["cp36m"],
+                    "implementation": "cp",
+                },
                 (
                     "platforms=['darwin'] version_info='3.6' abis=['cp36m'] "
                     "implementation='cp'"
@@ -88,12 +88,12 @@ class TestTargetPython:
             ((3, 7, 3), "37"),
             # Check a minor version with two digits.
             ((3, 10, 1), "310"),
-            # Check that versions=None is passed to get_tags().
+            # Check that versions=None is passed to get_sorted_tags().
             (None, None),
         ],
     )
     @mock.patch("pip._internal.models.target_python.get_supported")
-    def test_get_tags(
+    def test_get_sorted_tags(
         self,
         mock_get_supported: mock.Mock,
         py_version_info: Optional[Tuple[int, ...]],
@@ -102,7 +102,7 @@ class TestTargetPython:
         mock_get_supported.return_value = ["tag-1", "tag-2"]
 
         target_python = TargetPython(py_version_info=py_version_info)
-        actual = target_python.get_tags()
+        actual = target_python.get_sorted_tags()
         assert actual == ["tag-1", "tag-2"]
 
         actual = mock_get_supported.call_args[1]["version"]
@@ -111,14 +111,14 @@ class TestTargetPython:
         # Check that the value was cached.
         assert target_python._valid_tags == ["tag-1", "tag-2"]
 
-    def test_get_tags__uses_cached_value(self) -> None:
+    def test_get_unsorted_tags__uses_cached_value(self) -> None:
         """
-        Test that get_tags() uses the cached value.
+        Test that get_unsorted_tags() uses the cached value.
         """
         target_python = TargetPython(py_version_info=None)
-        target_python._valid_tags = [
+        target_python._valid_tags_set = {
             Tag("py2", "none", "any"),
             Tag("py3", "none", "any"),
-        ]
-        actual = target_python.get_tags()
-        assert actual == [Tag("py2", "none", "any"), Tag("py3", "none", "any")]
+        }
+        actual = target_python.get_unsorted_tags()
+        assert actual == {Tag("py2", "none", "any"), Tag("py3", "none", "any")}
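
The get_tags rename into a sorted/unsorted pair makes the two access patterns explicit: get_sorted_tags() keeps the priority-ordered list, while get_unsorted_tags() exposes a set for fast membership checks, hence the cached attribute becoming _valid_tags_set. A sketch of the trade-off:

    from pip._vendor.packaging.tags import Tag

    priority_order = [Tag("py3", "none", "any"), Tag("py2", "none", "any")]  # order matters
    membership = set(priority_order)  # O(1) lookups, no order
    assert Tag("py2", "none", "any") in membership
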
@@ -257,9 +257,13 @@ def test_rmtree_errorhandler_reraises_error(tmpdir: Path) -> None:
     except RuntimeError:
         # Make sure the handler reraises an exception
         with pytest.raises(RuntimeError, match="test message"):
-            # Argument 3 to "rmtree_errorhandler" has incompatible type "None"; expected
-            # "Tuple[Type[BaseException], BaseException, TracebackType]"
-            rmtree_errorhandler(mock_func, path, None)  # type: ignore[arg-type]
+            # Argument 3 to "rmtree_errorhandler" has incompatible type
+            # "Union[Tuple[Type[BaseException], BaseException, TracebackType],
+            # Tuple[None, None, None]]"; expected "Tuple[Type[BaseException],
+            # BaseException, TracebackType]"
+            rmtree_errorhandler(
+                mock_func, path, sys.exc_info()  # type: ignore[arg-type]
+            )
 
     mock_func.assert_not_called()
@@ -4,6 +4,7 @@ import stat
 import tempfile
 from pathlib import Path
 from typing import Any, Iterator, Optional, Union
+from unittest import mock
 
 import pytest
 
@@ -274,3 +275,25 @@ def test_tempdir_registry_lazy(should_delete: bool) -> None:
             registry.set_delete("test-for-lazy", should_delete)
         assert os.path.exists(path)
     assert os.path.exists(path) == (not should_delete)
+
+
+def test_tempdir_cleanup_ignore_errors() -> None:
+    os_unlink = os.unlink
+
+    # mock os.unlink to fail with EACCES for a specific filename to simulate
+    # how removing a loaded exe/dll behaves.
+    def unlink(name: str, *args: Any, **kwargs: Any) -> None:
+        if "bomb" in name:
+            raise PermissionError(name)
+        else:
+            os_unlink(name)
+
+    with mock.patch("os.unlink", unlink):
+        with TempDirectory(ignore_cleanup_errors=True) as tmp_dir:
+            path = tmp_dir.path
+            with open(os.path.join(path, "bomb"), "a"):
+                pass
+
+    filename = os.path.join(path, "bomb")
+    assert os.path.isfile(filename)
+    os.unlink(filename)
@@ -73,7 +73,7 @@ def test_rev_options_repr() -> None:
             Git,
             ["HEAD", "opt1", "opt2"],
             ["123", "opt1", "opt2"],
-            dict(extra_args=["opt1", "opt2"]),
+            {"extra_args": ["opt1", "opt2"]},
         ),
     ],
 )