mirror of https://github.com/pypa/pip
Merge branch 'main' into separate-config-same-level
This commit is contained in:
commit
af49b81e05
|
@ -0,0 +1,6 @@
|
|||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: "github-actions"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "monthly"
|
|
@ -21,7 +21,7 @@ jobs:
|
|||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: "3.x"
|
||||
|
@ -57,7 +57,7 @@ jobs:
|
|||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: "3.x"
|
||||
|
@ -81,7 +81,7 @@ jobs:
|
|||
github.event_name != 'pull_request'
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: "3.x"
|
||||
|
@ -112,7 +112,7 @@ jobs:
|
|||
- "3.12"
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ matrix.python }}
|
||||
|
@ -164,7 +164,7 @@ jobs:
|
|||
group: [1, 2]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: ${{ matrix.python }}
|
||||
|
@ -215,7 +215,7 @@ jobs:
|
|||
github.event_name != 'pull_request'
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: "3.10"
|
||||
|
|
|
@ -17,7 +17,7 @@ jobs:
|
|||
if: github.repository_owner == 'pypa'
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: dessant/lock-threads@v3
|
||||
- uses: dessant/lock-threads@v4
|
||||
with:
|
||||
issue-inactive-days: '30'
|
||||
pr-inactive-days: '15'
|
||||
|
|
|
@ -10,7 +10,7 @@ jobs:
|
|||
runs-on: ubuntu-20.04
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
# `towncrier check` runs `git diff --name-only origin/main...`, which
|
||||
# needs a non-shallow clone.
|
||||
|
|
|
@ -18,7 +18,7 @@ jobs:
|
|||
runs-on: ubuntu-latest
|
||||
environment: RTD Deploys
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: "3.11"
|
||||
|
|
|
@ -22,8 +22,7 @@ repos:
|
|||
- id: black
|
||||
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
# Ruff version.
|
||||
rev: v0.0.287
|
||||
rev: v0.0.292
|
||||
hooks:
|
||||
- id: ruff
|
||||
|
||||
|
|
13
AUTHORS.txt
13
AUTHORS.txt
|
@ -20,6 +20,7 @@ Albert-Guan
|
|||
albertg
|
||||
Alberto Sottile
|
||||
Aleks Bunin
|
||||
Ales Erjavec
|
||||
Alethea Flowers
|
||||
Alex Gaynor
|
||||
Alex Grönholm
|
||||
|
@ -30,6 +31,7 @@ Alex Stachowiak
|
|||
Alexander Shtyrov
|
||||
Alexandre Conrad
|
||||
Alexey Popravka
|
||||
Aleš Erjavec
|
||||
Alli
|
||||
Ami Fischman
|
||||
Ananya Maiti
|
||||
|
@ -196,9 +198,11 @@ David Runge
|
|||
David Tucker
|
||||
David Wales
|
||||
Davidovich
|
||||
ddelange
|
||||
Deepak Sharma
|
||||
Deepyaman Datta
|
||||
Denise Yu
|
||||
dependabot[bot]
|
||||
derwolfe
|
||||
Desetude
|
||||
Devesh Kumar Singh
|
||||
|
@ -312,6 +316,7 @@ Ilya Baryshev
|
|||
Inada Naoki
|
||||
Ionel Cristian Mărieș
|
||||
Ionel Maries Cristian
|
||||
Itamar Turner-Trauring
|
||||
Ivan Pozdeev
|
||||
Jacob Kim
|
||||
Jacob Walls
|
||||
|
@ -338,6 +343,7 @@ Jay Graves
|
|||
Jean-Christophe Fillion-Robin
|
||||
Jeff Barber
|
||||
Jeff Dairiki
|
||||
Jeff Widman
|
||||
Jelmer Vernooij
|
||||
jenix21
|
||||
Jeremy Stanley
|
||||
|
@ -367,6 +373,7 @@ Joseph Long
|
|||
Josh Bronson
|
||||
Josh Hansen
|
||||
Josh Schneier
|
||||
Joshua
|
||||
Juan Luis Cano Rodríguez
|
||||
Juanjo Bazán
|
||||
Judah Rand
|
||||
|
@ -397,6 +404,7 @@ KOLANICH
|
|||
kpinc
|
||||
Krishna Oza
|
||||
Kumar McMillan
|
||||
Kurt McKee
|
||||
Kyle Persohn
|
||||
lakshmanaram
|
||||
Laszlo Kiss-Kollar
|
||||
|
@ -413,6 +421,7 @@ lorddavidiii
|
|||
Loren Carvalho
|
||||
Lucas Cimon
|
||||
Ludovic Gasc
|
||||
Lukas Geiger
|
||||
Lukas Juhrich
|
||||
Luke Macken
|
||||
Luo Jiebin
|
||||
|
@ -529,6 +538,7 @@ Patrick Jenkins
|
|||
Patrick Lawson
|
||||
patricktokeeffe
|
||||
Patrik Kopkan
|
||||
Paul Ganssle
|
||||
Paul Kehrer
|
||||
Paul Moore
|
||||
Paul Nasrat
|
||||
|
@ -609,6 +619,7 @@ ryneeverett
|
|||
Sachi King
|
||||
Salvatore Rinchiera
|
||||
sandeepkiran-js
|
||||
Sander Van Balen
|
||||
Savio Jomton
|
||||
schlamar
|
||||
Scott Kitterman
|
||||
|
@ -621,6 +632,7 @@ SeongSoo Cho
|
|||
Sergey Vasilyev
|
||||
Seth Michael Larson
|
||||
Seth Woodworth
|
||||
Shahar Epstein
|
||||
Shantanu
|
||||
shireenrao
|
||||
Shivansh-007
|
||||
|
@ -648,6 +660,7 @@ Steve Kowalik
|
|||
Steven Myint
|
||||
Steven Silvester
|
||||
stonebig
|
||||
studioj
|
||||
Stéphane Bidoul
|
||||
Stéphane Bidoul (ACSONE)
|
||||
Stéphane Klein
|
||||
|
|
77
NEWS.rst
77
NEWS.rst
|
@ -9,13 +9,80 @@
|
|||
|
||||
.. towncrier release notes start
|
||||
|
||||
23.3.1 (2023-10-21)
|
||||
===================
|
||||
|
||||
Bug Fixes
|
||||
---------
|
||||
|
||||
- Handle a timezone indicator of Z when parsing dates in the self check. (`#12338 <https://github.com/pypa/pip/issues/12338>`_)
|
||||
- Fix bug where installing the same package at the same time with multiple pip processes could fail. (`#12361 <https://github.com/pypa/pip/issues/12361>`_)
|
||||
|
||||
|
||||
23.3 (2023-10-15)
|
||||
=================
|
||||
|
||||
Process
|
||||
-------
|
||||
|
||||
- Added reference to `vulnerability reporting guidelines <https://www.python.org/dev/security/>`_ to pip's security policy.
|
||||
|
||||
Deprecations and Removals
|
||||
-------------------------
|
||||
|
||||
- Drop a fallback to using SecureTransport on macOS. It was useful when pip detected OpenSSL older than 1.0.1, but the current pip does not support any Python version supporting such old OpenSSL versions. (`#12175 <https://github.com/pypa/pip/issues/12175>`_)
|
||||
|
||||
Features
|
||||
--------
|
||||
|
||||
- Improve extras resolution for multiple constraints on same base package. (`#11924 <https://github.com/pypa/pip/issues/11924>`_)
|
||||
- Improve use of datastructures to make candidate selection 1.6x faster. (`#12204 <https://github.com/pypa/pip/issues/12204>`_)
|
||||
- Allow ``pip install --dry-run`` to use platform and ABI overriding options. (`#12215 <https://github.com/pypa/pip/issues/12215>`_)
|
||||
- Add ``is_yanked`` boolean entry to the installation report (``--report``) to indicate whether the requirement was yanked from the index, but was still selected by pip conform to :pep:`592`. (`#12224 <https://github.com/pypa/pip/issues/12224>`_)
|
||||
|
||||
Bug Fixes
|
||||
---------
|
||||
|
||||
- Ignore errors in temporary directory cleanup (show a warning instead). (`#11394 <https://github.com/pypa/pip/issues/11394>`_)
|
||||
- Normalize extras according to :pep:`685` from package metadata in the resolver
|
||||
for comparison. This ensures extras are correctly compared and merged as long
|
||||
as the package providing the extra(s) is built with values normalized according
|
||||
to the standard. Note, however, that this *does not* solve cases where the
|
||||
package itself contains unnormalized extra values in the metadata. (`#11649 <https://github.com/pypa/pip/issues/11649>`_)
|
||||
- Prevent downloading sdists twice when :pep:`658` metadata is present. (`#11847 <https://github.com/pypa/pip/issues/11847>`_)
|
||||
- Include all requested extras in the install report (``--report``). (`#11924 <https://github.com/pypa/pip/issues/11924>`_)
|
||||
- Removed uses of ``datetime.datetime.utcnow`` from non-vendored code. (`#12005 <https://github.com/pypa/pip/issues/12005>`_)
|
||||
- Consistently report whether a dependency comes from an extra. (`#12095 <https://github.com/pypa/pip/issues/12095>`_)
|
||||
- Fix completion script for zsh (`#12166 <https://github.com/pypa/pip/issues/12166>`_)
|
||||
- Fix improper handling of the new onexc argument of ``shutil.rmtree()`` in Python 3.12. (`#12187 <https://github.com/pypa/pip/issues/12187>`_)
|
||||
- Filter out yanked links from the available versions error message: "(from versions: 1.0, 2.0, 3.0)" will not contain yanked versions conform PEP 592. The yanked versions (if any) will be mentioned in a separate error message. (`#12225 <https://github.com/pypa/pip/issues/12225>`_)
|
||||
- Fix crash when the git version number contains something else than digits and dots. (`#12280 <https://github.com/pypa/pip/issues/12280>`_)
|
||||
- Use ``-r=...`` instead of ``-r ...`` to specify references with Mercurial. (`#12306 <https://github.com/pypa/pip/issues/12306>`_)
|
||||
- Redact password from URLs in some additional places. (`#12350 <https://github.com/pypa/pip/issues/12350>`_)
|
||||
- pip uses less memory when caching large packages. As a result, there is a new on-disk cache format stored in a new directory ($PIP_CACHE_DIR/http-v2). (`#2984 <https://github.com/pypa/pip/issues/2984>`_)
|
||||
|
||||
Vendored Libraries
|
||||
------------------
|
||||
|
||||
- Upgrade certifi to 2023.7.22
|
||||
- Add truststore 0.8.0
|
||||
- Upgrade urllib3 to 1.26.17
|
||||
|
||||
Improved Documentation
|
||||
----------------------
|
||||
|
||||
- Document that ``pip search`` support has been removed from PyPI (`#12059 <https://github.com/pypa/pip/issues/12059>`_)
|
||||
- Clarify --prefer-binary in CLI and docs (`#12122 <https://github.com/pypa/pip/issues/12122>`_)
|
||||
- Document that using OS-provided Python can cause pip's test suite to report false failures. (`#12334 <https://github.com/pypa/pip/issues/12334>`_)
|
||||
|
||||
|
||||
23.2.1 (2023-07-22)
|
||||
===================
|
||||
|
||||
Bug Fixes
|
||||
---------
|
||||
|
||||
- Disable PEP 658 metadata fetching with the legacy resolver. (`#12156 <https://github.com/pypa/pip/issues/12156>`_)
|
||||
- Disable :pep:`658` metadata fetching with the legacy resolver. (`#12156 <https://github.com/pypa/pip/issues/12156>`_)
|
||||
|
||||
|
||||
23.2 (2023-07-15)
|
||||
|
@ -45,11 +112,11 @@ Bug Fixes
|
|||
---------
|
||||
|
||||
- Fix ``pip completion --zsh``. (`#11417 <https://github.com/pypa/pip/issues/11417>`_)
|
||||
- Prevent downloading files twice when PEP 658 metadata is present (`#11847 <https://github.com/pypa/pip/issues/11847>`_)
|
||||
- Prevent downloading files twice when :pep:`658` metadata is present (`#11847 <https://github.com/pypa/pip/issues/11847>`_)
|
||||
- Add permission check before configuration (`#11920 <https://github.com/pypa/pip/issues/11920>`_)
|
||||
- Fix deprecation warnings in Python 3.12 for usage of shutil.rmtree (`#11957 <https://github.com/pypa/pip/issues/11957>`_)
|
||||
- Ignore invalid or unreadable ``origin.json`` files in the cache of locally built wheels. (`#11985 <https://github.com/pypa/pip/issues/11985>`_)
|
||||
- Fix installation of packages with PEP658 metadata using non-canonicalized names (`#12038 <https://github.com/pypa/pip/issues/12038>`_)
|
||||
- Fix installation of packages with :pep:`658` metadata using non-canonicalized names (`#12038 <https://github.com/pypa/pip/issues/12038>`_)
|
||||
- Correctly parse ``dist-info-metadata`` values from JSON-format index data. (`#12042 <https://github.com/pypa/pip/issues/12042>`_)
|
||||
- Fail with an error if the ``--python`` option is specified after the subcommand name. (`#12067 <https://github.com/pypa/pip/issues/12067>`_)
|
||||
- Fix slowness when using ``importlib.metadata`` (the default way for pip to read metadata in Python 3.11+) and there is a large overlap between already installed and to-be-installed packages. (`#12079 <https://github.com/pypa/pip/issues/12079>`_)
|
||||
|
@ -220,7 +287,7 @@ Features
|
|||
|
||||
- Change the hashes in the installation report to be a mapping. Emit the
|
||||
``archive_info.hashes`` dictionary in ``direct_url.json``. (`#11312 <https://github.com/pypa/pip/issues/11312>`_)
|
||||
- Implement logic to read the ``EXTERNALLY-MANAGED`` file as specified in PEP 668.
|
||||
- Implement logic to read the ``EXTERNALLY-MANAGED`` file as specified in :pep:`668`.
|
||||
This allows a downstream Python distributor to prevent users from using pip to
|
||||
modify the externally managed environment. (`#11381 <https://github.com/pypa/pip/issues/11381>`_)
|
||||
- Enable the use of ``keyring`` found on ``PATH``. This allows ``keyring``
|
||||
|
@ -236,7 +303,7 @@ Bug Fixes
|
|||
- Use the "venv" scheme if available to obtain prefixed lib paths. (`#11598 <https://github.com/pypa/pip/issues/11598>`_)
|
||||
- Deprecated a historical ambiguity in how ``egg`` fragments in URL-style
|
||||
requirements are formatted and handled. ``egg`` fragments that do not look
|
||||
like PEP 508 names now produce a deprecation warning. (`#11617 <https://github.com/pypa/pip/issues/11617>`_)
|
||||
like :pep:`508` names now produce a deprecation warning. (`#11617 <https://github.com/pypa/pip/issues/11617>`_)
|
||||
- Fix scripts path in isolated build environment on Debian. (`#11623 <https://github.com/pypa/pip/issues/11623>`_)
|
||||
- Make ``pip show`` show the editable location if package is editable (`#11638 <https://github.com/pypa/pip/issues/11638>`_)
|
||||
- Stop checking that ``wheel`` is present when ``build-system.requires``
|
||||
|
|
|
@ -3,9 +3,15 @@ pip - The Python Package Installer
|
|||
|
||||
.. image:: https://img.shields.io/pypi/v/pip.svg
|
||||
:target: https://pypi.org/project/pip/
|
||||
:alt: PyPI
|
||||
|
||||
.. image:: https://img.shields.io/pypi/pyversions/pip
|
||||
:target: https://pypi.org/project/pip
|
||||
:alt: PyPI - Python Version
|
||||
|
||||
.. image:: https://readthedocs.org/projects/pip/badge/?version=latest
|
||||
:target: https://pip.pypa.io/en/latest
|
||||
:alt: Documentation
|
||||
|
||||
pip is the `package installer`_ for Python. You can use pip to install packages from the `Python Package Index`_ and other indexes.
|
||||
|
||||
|
@ -19,8 +25,6 @@ We release updates regularly, with a new version every 3 months. Find more detai
|
|||
* `Release notes`_
|
||||
* `Release process`_
|
||||
|
||||
**Note**: pip 21.0, in January 2021, removed Python 2 support, per pip's `Python 2 support policy`_. Please migrate to Python 3.
|
||||
|
||||
If you find bugs, need help, or want to talk to the developers, please use our mailing lists or chat rooms:
|
||||
|
||||
* `Issue tracking`_
|
||||
|
@ -47,7 +51,6 @@ rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_.
|
|||
.. _Release process: https://pip.pypa.io/en/latest/development/release-process/
|
||||
.. _GitHub page: https://github.com/pypa/pip
|
||||
.. _Development documentation: https://pip.pypa.io/en/latest/development
|
||||
.. _Python 2 support policy: https://pip.pypa.io/en/latest/development/release-process/#python-2-support
|
||||
.. _Issue tracking: https://github.com/pypa/pip/issues
|
||||
.. _Discourse channel: https://discuss.python.org/c/packaging
|
||||
.. _User IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa
|
||||
|
|
|
@ -21,6 +21,12 @@ Usage
|
|||
Description
|
||||
===========
|
||||
|
||||
.. attention::
|
||||
PyPI no longer supports ``pip search`` (or XML-RPC search). Please use https://pypi.org/search (via a browser)
|
||||
instead. See https://warehouse.pypa.io/api-reference/xml-rpc.html#deprecated-methods for more information.
|
||||
|
||||
However, XML-RPC search (and this command) may still be supported by indexes other than PyPI.
|
||||
|
||||
.. pip-command-description:: search
|
||||
|
||||
|
||||
|
|
|
@ -112,7 +112,7 @@ the ``news/`` directory with the extension of ``.trivial.rst``. If you are on a
|
|||
POSIX like operating system, one can be added by running
|
||||
``touch news/$(uuidgen).trivial.rst``. On Windows, the same result can be
|
||||
achieved in Powershell using ``New-Item "news/$([guid]::NewGuid()).trivial.rst"``.
|
||||
Core committers may also add a "trivial" label to the PR which will accomplish
|
||||
Core committers may also add a "skip news" label to the PR which will accomplish
|
||||
the same thing.
|
||||
|
||||
Upgrading, removing, or adding a new vendored library gets a special mention
|
||||
|
|
|
@ -73,7 +73,7 @@ pip's tests are written using the :pypi:`pytest` test framework and
|
|||
:mod:`unittest.mock`. :pypi:`nox` is used to automate the setup and execution
|
||||
of pip's tests.
|
||||
|
||||
It is preferable to run the tests in parallel for better experience during development,
|
||||
It is preferable to run the tests in parallel for a better experience during development,
|
||||
since the tests can take a long time to finish when run sequentially.
|
||||
|
||||
To run tests:
|
||||
|
@ -104,6 +104,15 @@ can select tests using the various ways that pytest provides:
|
|||
$ # Using keywords
|
||||
$ nox -s test-3.10 -- -k "install and not wheel"
|
||||
|
||||
.. note::
|
||||
|
||||
When running pip's tests with OS distribution Python versions, be aware that some
|
||||
functional tests may fail due to potential patches introduced by the distribution.
|
||||
For all tests to pass consider:
|
||||
|
||||
- Installing Python from `python.org`_ or compile from source
|
||||
- Or, using `pyenv`_ to assist with source compilation
|
||||
|
||||
Running pip's entire test suite requires supported version control tools
|
||||
(subversion, bazaar, git, and mercurial) to be installed. If you are missing
|
||||
any of these VCS, those tests should be skipped automatically. You can also
|
||||
|
@ -114,6 +123,9 @@ explicitly tell pytest to skip those tests:
|
|||
$ nox -s test-3.10 -- -k "not svn"
|
||||
$ nox -s test-3.10 -- -k "not (svn or git)"
|
||||
|
||||
.. _python.org: https://www.python.org/downloads/
|
||||
.. _pyenv: https://github.com/pyenv/pyenv
|
||||
|
||||
|
||||
Running Linters
|
||||
===============
|
||||
|
|
|
@ -145,8 +145,8 @@ Creating a new release
|
|||
#. Push the tag created by ``prepare-release``.
|
||||
#. Regenerate the ``get-pip.py`` script in the `get-pip repository`_ (as
|
||||
documented there) and commit the results.
|
||||
#. Submit a Pull Request to `CPython`_ adding the new version of pip (and upgrading
|
||||
setuptools) to ``Lib/ensurepip/_bundled``, removing the existing version, and
|
||||
#. Submit a Pull Request to `CPython`_ adding the new version of pip
|
||||
to ``Lib/ensurepip/_bundled``, removing the existing version, and
|
||||
adjusting the versions listed in ``Lib/ensurepip/__init__.py``.
|
||||
|
||||
|
||||
|
|
|
@ -27,6 +27,13 @@ While this cache attempts to minimize network activity, it does not prevent
|
|||
network access altogether. If you want a local install solution that
|
||||
circumvents accessing PyPI, see {ref}`Installing from local packages`.
|
||||
|
||||
```{versionchanged} 23.3
|
||||
A new cache format is now used, stored in a directory called `http-v2` (see
|
||||
below for this directory's location). Previously this cache was stored in a
|
||||
directory called `http` in the main cache directory. If you have completely
|
||||
switched to newer versions of `pip`, you may wish to delete the old directory.
|
||||
```
|
||||
|
||||
(wheel-caching)=
|
||||
|
||||
### Locally built wheels
|
||||
|
@ -124,11 +131,11 @@ The {ref}`pip cache` command can be used to manage pip's cache.
|
|||
|
||||
### Removing a single package
|
||||
|
||||
`pip cache remove setuptools` removes all wheel files related to setuptools from pip's cache.
|
||||
`pip cache remove setuptools` removes all wheel files related to setuptools from pip's cache. HTTP cache files are not removed at this time.
|
||||
|
||||
### Removing the cache
|
||||
|
||||
`pip cache purge` will clear all wheel files from pip's cache.
|
||||
`pip cache purge` will clear all files from pip's wheel and HTTP caches.
|
||||
|
||||
### Listing cached files
|
||||
|
||||
|
|
|
@ -28,19 +28,9 @@ It is possible to use the system trust store, instead of the bundled certifi
|
|||
certificates for verifying HTTPS certificates. This approach will typically
|
||||
support corporate proxy certificates without additional configuration.
|
||||
|
||||
In order to use system trust stores, you need to:
|
||||
|
||||
- Use Python 3.10 or newer.
|
||||
- Install the {pypi}`truststore` package, in the Python environment you're
|
||||
running pip in.
|
||||
|
||||
This is typically done by installing this package using a system package
|
||||
manager or by using pip in {ref}`Hash-checking mode` for this package and
|
||||
trusting the network using the `--trusted-host` flag.
|
||||
In order to use system trust stores, you need to use Python 3.10 or newer.
|
||||
|
||||
```{pip-cli}
|
||||
$ python -m pip install truststore
|
||||
[...]
|
||||
$ python -m pip install SomePackage --use-feature=truststore
|
||||
[...]
|
||||
Successfully installed SomePackage
|
||||
|
|
|
@ -1 +0,0 @@
|
|||
Ignore errors in temporary directory cleanup (show a warning instead).
|
|
@ -1 +0,0 @@
|
|||
Removed uses of ``datetime.datetime.utcnow`` from non-vendored code.
|
|
@ -1,6 +0,0 @@
|
|||
The metadata-fetching log message is moved to the VERBOSE level and now hidden
|
||||
by default. The more significant information in this message to most users are
|
||||
already available in surrounding logs (the package name and version of the
|
||||
metadata being fetched), while the URL to the exact metadata file is generally
|
||||
too long and clutters the output. The message can be brought back with
|
||||
``--verbose``.
|
|
@ -1 +0,0 @@
|
|||
Drop a fallback to using SecureTransport on macOS. It was useful when pip detected OpenSSL older than 1.0.1, but the current pip does not support any Python version supporting such old OpenSSL versions.
|
|
@ -1 +0,0 @@
|
|||
Add test cases for some behaviors of ``install --dry-run`` and ``--use-feature=fast-deps``.
|
|
@ -1 +0,0 @@
|
|||
Fix improper handling of the new onexc argument of ``shutil.rmtree()`` in Python 3.12.
|
|
@ -1 +0,0 @@
|
|||
Prevent downloading sdists twice when PEP 658 metadata is present.
|
|
@ -1 +0,0 @@
|
|||
Add lots of comments to the ``BuildTracker``.
|
|
@ -1 +0,0 @@
|
|||
Improve use of datastructures to make candidate selection 1.6x faster
|
|
@ -1 +0,0 @@
|
|||
Allow ``pip install --dry-run`` to use platform and ABI overriding options similar to ``--target``.
|
|
@ -1 +0,0 @@
|
|||
Add ``is_yanked`` boolean entry to the installation report (``--report``) to indicate whether the requirement was yanked from the index, but was still selected by pip conform to PEP 592.
|
|
@ -1 +0,0 @@
|
|||
Filter out yanked links from the available versions error message: "(from versions: 1.0, 2.0, 3.0)" will not contain yanked versions conform PEP 592. The yanked versions (if any) will be mentioned in a separate error message.
|
|
@ -1 +0,0 @@
|
|||
Added reference to `vulnerability reporting guidelines <https://www.python.org/dev/security/>`_ to pip's security policy.
|
|
@ -1 +0,0 @@
|
|||
Add ruff rules ASYNC,C4,C90,PERF,PLE,PLR for minor optimizations and to set upper limits on code complexity.
|
|
@ -1 +0,0 @@
|
|||
Upgrade certifi to 2023.7.22
|
11
noxfile.py
11
noxfile.py
|
@ -184,6 +184,12 @@ def lint(session: nox.Session) -> None:
|
|||
# git reset --hard origin/main
|
||||
@nox.session
|
||||
def vendoring(session: nox.Session) -> None:
|
||||
# Ensure that the session Python is running 3.10+
|
||||
# so that truststore can be installed correctly.
|
||||
session.run(
|
||||
"python", "-c", "import sys; sys.exit(1 if sys.version_info < (3, 10) else 0)"
|
||||
)
|
||||
|
||||
session.install("vendoring~=1.2.0")
|
||||
|
||||
parser = argparse.ArgumentParser(prog="nox -s vendoring")
|
||||
|
@ -316,7 +322,7 @@ def build_release(session: nox.Session) -> None:
|
|||
)
|
||||
|
||||
session.log("# Install dependencies")
|
||||
session.install("setuptools", "wheel", "twine")
|
||||
session.install("build", "twine")
|
||||
|
||||
with release.isolated_temporary_checkout(session, version) as build_dir:
|
||||
session.log(
|
||||
|
@ -352,8 +358,7 @@ def build_dists(session: nox.Session) -> List[str]:
|
|||
)
|
||||
|
||||
session.log("# Build distributions")
|
||||
session.install("setuptools", "wheel")
|
||||
session.run("python", "setup.py", "sdist", "bdist_wheel", silent=True)
|
||||
session.run("python", "-m", "build", silent=True)
|
||||
produced_dists = glob.glob("dist/*")
|
||||
|
||||
session.log(f"# Verify distributions: {', '.join(produced_dists)}")
|
||||
|
|
|
@ -101,6 +101,7 @@ select = [
|
|||
"PLE",
|
||||
"PLR0",
|
||||
"W",
|
||||
"RUF100",
|
||||
]
|
||||
|
||||
[tool.ruff.isort]
|
||||
|
|
51
setup.cfg
51
setup.cfg
|
@ -1,46 +1,13 @@
|
|||
[isort]
|
||||
profile = black
|
||||
skip =
|
||||
./build,
|
||||
.nox,
|
||||
.tox,
|
||||
.scratch,
|
||||
_vendor,
|
||||
data
|
||||
known_third_party =
|
||||
pip._vendor
|
||||
|
||||
[flake8]
|
||||
max-line-length = 88
|
||||
exclude =
|
||||
./build,
|
||||
.nox,
|
||||
.tox,
|
||||
.scratch,
|
||||
_vendor,
|
||||
data
|
||||
enable-extensions = G
|
||||
extend-ignore =
|
||||
G200, G202,
|
||||
# black adds spaces around ':'
|
||||
E203,
|
||||
# using a cache
|
||||
B019,
|
||||
# reassigning variables in a loop
|
||||
B020,
|
||||
per-file-ignores =
|
||||
# G: The plugin logging-format treats every .log and .error as logging.
|
||||
noxfile.py: G
|
||||
# B011: Do not call assert False since python -O removes these calls
|
||||
tests/*: B011
|
||||
|
||||
[mypy]
|
||||
mypy_path = $MYPY_CONFIG_FILE_DIR/src
|
||||
|
||||
strict = True
|
||||
|
||||
no_implicit_reexport = False
|
||||
allow_subclassing_any = True
|
||||
allow_untyped_calls = True
|
||||
warn_return_any = False
|
||||
ignore_missing_imports = True
|
||||
disallow_untyped_defs = True
|
||||
disallow_any_generics = True
|
||||
warn_unused_ignores = True
|
||||
no_implicit_optional = True
|
||||
|
||||
[mypy-pip._internal.utils._jaraco_text]
|
||||
ignore_errors = True
|
||||
|
@ -51,12 +18,8 @@ ignore_errors = True
|
|||
# These vendored libraries use runtime magic to populate things and don't sit
|
||||
# well with static typing out of the box. Eventually we should provide correct
|
||||
# typing information for their public interface and remove these configs.
|
||||
[mypy-pip._vendor.colorama]
|
||||
follow_imports = skip
|
||||
[mypy-pip._vendor.pkg_resources]
|
||||
follow_imports = skip
|
||||
[mypy-pip._vendor.progress.*]
|
||||
follow_imports = skip
|
||||
[mypy-pip._vendor.requests.*]
|
||||
follow_imports = skip
|
||||
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
from typing import List, Optional
|
||||
|
||||
__version__ = "23.3.dev0"
|
||||
__version__ = "24.0.dev0"
|
||||
|
||||
|
||||
def main(args: Optional[List[str]] = None) -> int:
|
||||
|
|
|
@ -181,7 +181,7 @@ class Command(CommandContextMixIn):
|
|||
assert isinstance(status, int)
|
||||
return status
|
||||
except DiagnosticPipError as exc:
|
||||
logger.error("[present-rich] %s", exc)
|
||||
logger.error("%s", exc, extra={"rich": True})
|
||||
logger.debug("Exception information:", exc_info=True)
|
||||
|
||||
return ERROR
|
||||
|
|
|
@ -670,7 +670,10 @@ def prefer_binary() -> Option:
|
|||
dest="prefer_binary",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Prefer older binary packages over newer source packages.",
|
||||
help=(
|
||||
"Prefer binary packages over source packages, even if the "
|
||||
"source packages are newer."
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
|
@ -823,7 +826,7 @@ def _handle_config_settings(
|
|||
) -> None:
|
||||
key, sep, val = value.partition("=")
|
||||
if sep != "=":
|
||||
parser.error(f"Arguments to {opt_str} must be of the form KEY=VAL") # noqa
|
||||
parser.error(f"Arguments to {opt_str} must be of the form KEY=VAL")
|
||||
dest = getattr(parser.values, option.dest)
|
||||
if dest is None:
|
||||
dest = {}
|
||||
|
@ -918,13 +921,13 @@ def _handle_merge_hash(
|
|||
algo, digest = value.split(":", 1)
|
||||
except ValueError:
|
||||
parser.error(
|
||||
"Arguments to {} must be a hash name " # noqa
|
||||
"Arguments to {} must be a hash name "
|
||||
"followed by a value, like --hash=sha256:"
|
||||
"abcde...".format(opt_str)
|
||||
)
|
||||
if algo not in STRONG_HASHES:
|
||||
parser.error(
|
||||
"Allowed hash algorithms for {} are {}.".format( # noqa
|
||||
"Allowed hash algorithms for {} are {}.".format(
|
||||
opt_str, ", ".join(STRONG_HASHES)
|
||||
)
|
||||
)
|
||||
|
|
|
@ -244,7 +244,7 @@ class ConfigOptionParser(CustomOptionParser):
|
|||
val = strtobool(val)
|
||||
except ValueError:
|
||||
self.error(
|
||||
"{} is not a valid value for {} option, " # noqa
|
||||
"{} is not a valid value for {} option, "
|
||||
"please specify a boolean value like yes/no, "
|
||||
"true/false or 1/0 instead.".format(val, key)
|
||||
)
|
||||
|
@ -255,7 +255,7 @@ class ConfigOptionParser(CustomOptionParser):
|
|||
val = int(val)
|
||||
if not isinstance(val, int) or val < 0:
|
||||
self.error(
|
||||
"{} is not a valid value for {} option, " # noqa
|
||||
"{} is not a valid value for {} option, "
|
||||
"please instead specify either a non-negative integer "
|
||||
"or a boolean value like yes/no or false/true "
|
||||
"which is equivalent to 1/0.".format(val, key)
|
||||
|
|
|
@ -58,12 +58,9 @@ def _create_truststore_ssl_context() -> Optional["SSLContext"]:
|
|||
return None
|
||||
|
||||
try:
|
||||
import truststore
|
||||
except ImportError:
|
||||
raise CommandError(
|
||||
"To use the truststore feature, 'truststore' must be installed into "
|
||||
"pip's current environment."
|
||||
)
|
||||
from pip._vendor import truststore
|
||||
except ImportError as e:
|
||||
raise CommandError(f"The truststore feature is unavailable: {e}")
|
||||
|
||||
return truststore.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
|
||||
|
||||
|
@ -123,7 +120,7 @@ class SessionCommandMixin(CommandContextMixIn):
|
|||
ssl_context = None
|
||||
|
||||
session = PipSession(
|
||||
cache=os.path.join(cache_dir, "http") if cache_dir else None,
|
||||
cache=os.path.join(cache_dir, "http-v2") if cache_dir else None,
|
||||
retries=retries if retries is not None else options.retries,
|
||||
trusted_hosts=options.trusted_hosts,
|
||||
index_urls=self._get_index_urls(options),
|
||||
|
@ -268,7 +265,7 @@ class RequirementCommand(IndexGroupCommand):
|
|||
if "legacy-resolver" in options.deprecated_features_enabled:
|
||||
return "legacy"
|
||||
|
||||
return "2020-resolver"
|
||||
return "resolvelib"
|
||||
|
||||
@classmethod
|
||||
def make_requirement_preparer(
|
||||
|
@ -290,7 +287,7 @@ class RequirementCommand(IndexGroupCommand):
|
|||
legacy_resolver = False
|
||||
|
||||
resolver_variant = cls.determine_resolver_variant(options)
|
||||
if resolver_variant == "2020-resolver":
|
||||
if resolver_variant == "resolvelib":
|
||||
lazy_wheel = "fast-deps" in options.features_enabled
|
||||
if lazy_wheel:
|
||||
logger.warning(
|
||||
|
@ -352,7 +349,7 @@ class RequirementCommand(IndexGroupCommand):
|
|||
# The long import name and duplicated invocation is needed to convince
|
||||
# Mypy into correctly typechecking. Otherwise it would complain the
|
||||
# "Resolver" class being redefined.
|
||||
if resolver_variant == "2020-resolver":
|
||||
if resolver_variant == "resolvelib":
|
||||
import pip._internal.resolution.resolvelib.resolver
|
||||
|
||||
return pip._internal.resolution.resolvelib.resolver.Resolver(
|
||||
|
|
|
@ -93,24 +93,30 @@ class CacheCommand(Command):
|
|||
num_http_files = len(self._find_http_files(options))
|
||||
num_packages = len(self._find_wheels(options, "*"))
|
||||
|
||||
http_cache_location = self._cache_dir(options, "http")
|
||||
http_cache_location = self._cache_dir(options, "http-v2")
|
||||
old_http_cache_location = self._cache_dir(options, "http")
|
||||
wheels_cache_location = self._cache_dir(options, "wheels")
|
||||
http_cache_size = filesystem.format_directory_size(http_cache_location)
|
||||
http_cache_size = filesystem.format_size(
|
||||
filesystem.directory_size(http_cache_location)
|
||||
+ filesystem.directory_size(old_http_cache_location)
|
||||
)
|
||||
wheels_cache_size = filesystem.format_directory_size(wheels_cache_location)
|
||||
|
||||
message = (
|
||||
textwrap.dedent(
|
||||
"""
|
||||
Package index page cache location: {http_cache_location}
|
||||
Package index page cache location (pip v23.3+): {http_cache_location}
|
||||
Package index page cache location (older pips): {old_http_cache_location}
|
||||
Package index page cache size: {http_cache_size}
|
||||
Number of HTTP files: {num_http_files}
|
||||
Locally built wheels location: {wheels_cache_location}
|
||||
Locally built wheels size: {wheels_cache_size}
|
||||
Number of locally built wheels: {package_count}
|
||||
"""
|
||||
""" # noqa: E501
|
||||
)
|
||||
.format(
|
||||
http_cache_location=http_cache_location,
|
||||
old_http_cache_location=old_http_cache_location,
|
||||
http_cache_size=http_cache_size,
|
||||
num_http_files=num_http_files,
|
||||
wheels_cache_location=wheels_cache_location,
|
||||
|
@ -189,8 +195,11 @@ class CacheCommand(Command):
|
|||
return os.path.join(options.cache_dir, subdir)
|
||||
|
||||
def _find_http_files(self, options: Values) -> List[str]:
|
||||
http_dir = self._cache_dir(options, "http")
|
||||
return filesystem.find_files(http_dir, "*")
|
||||
old_http_dir = self._cache_dir(options, "http")
|
||||
new_http_dir = self._cache_dir(options, "http-v2")
|
||||
return filesystem.find_files(old_http_dir, "*") + filesystem.find_files(
|
||||
new_http_dir, "*"
|
||||
)
|
||||
|
||||
def _find_wheels(self, options: Values, pattern: str) -> List[str]:
|
||||
wheel_dir = self._cache_dir(options, "wheels")
|
||||
|
|
|
@ -23,9 +23,18 @@ COMPLETION_SCRIPTS = {
|
|||
""",
|
||||
"zsh": """
|
||||
#compdef -P pip[0-9.]#
|
||||
__pip() {{
|
||||
compadd $( COMP_WORDS="$words[*]" \\
|
||||
COMP_CWORD=$((CURRENT-1)) \\
|
||||
PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null )
|
||||
}}
|
||||
if [[ $zsh_eval_context[-1] == loadautofunc ]]; then
|
||||
# autoload from fpath, call function directly
|
||||
__pip "$@"
|
||||
else
|
||||
# eval/source/. command, register function for later
|
||||
compdef __pip -P 'pip[0-9.]#'
|
||||
fi
|
||||
""",
|
||||
"fish": """
|
||||
function __fish_complete_pip
|
||||
|
|
|
@ -46,22 +46,29 @@ def create_vendor_txt_map() -> Dict[str, str]:
|
|||
return dict(line.split("==", 1) for line in lines)
|
||||
|
||||
|
||||
def get_module_from_module_name(module_name: str) -> ModuleType:
|
||||
def get_module_from_module_name(module_name: str) -> Optional[ModuleType]:
|
||||
# Module name can be uppercase in vendor.txt for some reason...
|
||||
module_name = module_name.lower().replace("-", "_")
|
||||
# PATCH: setuptools is actually only pkg_resources.
|
||||
if module_name == "setuptools":
|
||||
module_name = "pkg_resources"
|
||||
|
||||
try:
|
||||
__import__(f"pip._vendor.{module_name}", globals(), locals(), level=0)
|
||||
return getattr(pip._vendor, module_name)
|
||||
except ImportError:
|
||||
# We allow 'truststore' to fail to import due
|
||||
# to being unavailable on Python 3.9 and earlier.
|
||||
if module_name == "truststore" and sys.version_info < (3, 10):
|
||||
return None
|
||||
raise
|
||||
|
||||
|
||||
def get_vendor_version_from_module(module_name: str) -> Optional[str]:
|
||||
module = get_module_from_module_name(module_name)
|
||||
version = getattr(module, "__version__", None)
|
||||
|
||||
if not version:
|
||||
if module and not version:
|
||||
# Try to find version in debundled module info.
|
||||
assert module.__file__ is not None
|
||||
env = get_environment([os.path.dirname(module.__file__)])
|
||||
|
@ -134,9 +141,7 @@ def show_tags(options: Values) -> None:
|
|||
|
||||
|
||||
def ca_bundle_info(config: Configuration) -> str:
|
||||
# Ruff misidentifies config as a dict.
|
||||
# Configuration does not have support the mapping interface.
|
||||
levels = {key.split(".", 1)[0] for key, _ in config.items()} # noqa: PERF102
|
||||
levels = {key.split(".", 1)[0] for key, _ in config.items()}
|
||||
if not levels:
|
||||
return "Not specified"
|
||||
|
||||
|
|
|
@ -501,7 +501,7 @@ class InstallCommand(RequirementCommand):
|
|||
show_traceback,
|
||||
options.use_user_site,
|
||||
)
|
||||
logger.error(message, exc_info=show_traceback) # noqa
|
||||
logger.error(message, exc_info=show_traceback)
|
||||
|
||||
return ERROR
|
||||
|
||||
|
@ -595,7 +595,7 @@ class InstallCommand(RequirementCommand):
|
|||
"source of the following dependency conflicts."
|
||||
)
|
||||
else:
|
||||
assert resolver_variant == "2020-resolver"
|
||||
assert resolver_variant == "resolvelib"
|
||||
parts.append(
|
||||
"pip's dependency resolver does not currently take into account "
|
||||
"all the packages that are installed. This behaviour is the "
|
||||
|
@ -628,7 +628,7 @@ class InstallCommand(RequirementCommand):
|
|||
requirement=req,
|
||||
dep_name=dep_name,
|
||||
dep_version=dep_version,
|
||||
you=("you" if resolver_variant == "2020-resolver" else "you'll"),
|
||||
you=("you" if resolver_variant == "resolvelib" else "you'll"),
|
||||
)
|
||||
parts.append(message)
|
||||
|
||||
|
|
|
@ -9,7 +9,7 @@ from pip._internal.utils.misc import strtobool
|
|||
from .base import BaseDistribution, BaseEnvironment, FilesystemWheel, MemoryWheel, Wheel
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Protocol
|
||||
from typing import Literal, Protocol
|
||||
else:
|
||||
Protocol = object
|
||||
|
||||
|
@ -50,6 +50,7 @@ def _should_use_importlib_metadata() -> bool:
|
|||
|
||||
|
||||
class Backend(Protocol):
|
||||
NAME: 'Literal["importlib", "pkg_resources"]'
|
||||
Distribution: Type[BaseDistribution]
|
||||
Environment: Type[BaseEnvironment]
|
||||
|
||||
|
|
|
@ -24,7 +24,7 @@ from typing import (
|
|||
|
||||
from pip._vendor.packaging.requirements import Requirement
|
||||
from pip._vendor.packaging.specifiers import InvalidSpecifier, SpecifierSet
|
||||
from pip._vendor.packaging.utils import NormalizedName
|
||||
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
|
||||
from pip._vendor.packaging.version import LegacyVersion, Version
|
||||
|
||||
from pip._internal.exceptions import NoneMetadataError
|
||||
|
@ -37,7 +37,6 @@ from pip._internal.models.direct_url import (
|
|||
from pip._internal.utils.compat import stdlib_pkgs # TODO: Move definition here.
|
||||
from pip._internal.utils.egg_link import egg_link_path_from_sys_path
|
||||
from pip._internal.utils.misc import is_local, normalize_path
|
||||
from pip._internal.utils.packaging import safe_extra
|
||||
from pip._internal.utils.urls import url_to_path
|
||||
|
||||
from ._json import msg_to_json
|
||||
|
@ -460,6 +459,19 @@ class BaseDistribution(Protocol):
|
|||
|
||||
For modern .dist-info distributions, this is the collection of
|
||||
"Provides-Extra:" entries in distribution metadata.
|
||||
|
||||
The return value of this function is not particularly useful other than
|
||||
display purposes due to backward compatibility issues and the extra
|
||||
names being poorly normalized prior to PEP 685. If you want to perform
|
||||
logic operations on extras, use :func:`is_extra_provided` instead.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
def is_extra_provided(self, extra: str) -> bool:
|
||||
"""Check whether an extra is provided by this distribution.
|
||||
|
||||
This is needed mostly for compatibility issues with pkg_resources not
|
||||
following the extra normalization rules defined in PEP 685.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
|
@ -537,10 +549,11 @@ class BaseDistribution(Protocol):
|
|||
"""Get extras from the egg-info directory."""
|
||||
known_extras = {""}
|
||||
for entry in self._iter_requires_txt_entries():
|
||||
if entry.extra in known_extras:
|
||||
extra = canonicalize_name(entry.extra)
|
||||
if extra in known_extras:
|
||||
continue
|
||||
known_extras.add(entry.extra)
|
||||
yield entry.extra
|
||||
known_extras.add(extra)
|
||||
yield extra
|
||||
|
||||
def _iter_egg_info_dependencies(self) -> Iterable[str]:
|
||||
"""Get distribution dependencies from the egg-info directory.
|
||||
|
@ -556,10 +569,11 @@ class BaseDistribution(Protocol):
|
|||
all currently available PEP 517 backends, although not standardized.
|
||||
"""
|
||||
for entry in self._iter_requires_txt_entries():
|
||||
if entry.extra and entry.marker:
|
||||
marker = f'({entry.marker}) and extra == "{safe_extra(entry.extra)}"'
|
||||
elif entry.extra:
|
||||
marker = f'extra == "{safe_extra(entry.extra)}"'
|
||||
extra = canonicalize_name(entry.extra)
|
||||
if extra and entry.marker:
|
||||
marker = f'({entry.marker}) and extra == "{extra}"'
|
||||
elif extra:
|
||||
marker = f'extra == "{extra}"'
|
||||
elif entry.marker:
|
||||
marker = entry.marker
|
||||
else:
|
||||
|
|
|
@ -1,4 +1,6 @@
|
|||
from ._dists import Distribution
|
||||
from ._envs import Environment
|
||||
|
||||
__all__ = ["Distribution", "Environment"]
|
||||
__all__ = ["NAME", "Distribution", "Environment"]
|
||||
|
||||
NAME = "importlib"
|
||||
|
|
|
@ -27,7 +27,6 @@ from pip._internal.metadata.base import (
|
|||
Wheel,
|
||||
)
|
||||
from pip._internal.utils.misc import normalize_path
|
||||
from pip._internal.utils.packaging import safe_extra
|
||||
from pip._internal.utils.temp_dir import TempDirectory
|
||||
from pip._internal.utils.wheel import parse_wheel, read_wheel_metadata_file
|
||||
|
||||
|
@ -208,12 +207,16 @@ class Distribution(BaseDistribution):
|
|||
return cast(email.message.Message, self._dist.metadata)
|
||||
|
||||
def iter_provided_extras(self) -> Iterable[str]:
|
||||
return (
|
||||
safe_extra(extra) for extra in self.metadata.get_all("Provides-Extra", [])
|
||||
return self.metadata.get_all("Provides-Extra", [])
|
||||
|
||||
def is_extra_provided(self, extra: str) -> bool:
|
||||
return any(
|
||||
canonicalize_name(provided_extra) == canonicalize_name(extra)
|
||||
for provided_extra in self.metadata.get_all("Provides-Extra", [])
|
||||
)
|
||||
|
||||
def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
|
||||
contexts: Sequence[Dict[str, str]] = [{"extra": safe_extra(e)} for e in extras]
|
||||
contexts: Sequence[Dict[str, str]] = [{"extra": e} for e in extras]
|
||||
for req_string in self.metadata.get_all("Requires-Dist", []):
|
||||
req = Requirement(req_string)
|
||||
if not req.marker:
|
||||
|
|
|
@ -151,7 +151,8 @@ def _emit_egg_deprecation(location: Optional[str]) -> None:
|
|||
deprecated(
|
||||
reason=f"Loading egg at {location} is deprecated.",
|
||||
replacement="to use pip for package installation.",
|
||||
gone_in="23.3",
|
||||
gone_in="24.3",
|
||||
issue=12330,
|
||||
)
|
||||
|
||||
|
||||
|
|
|
@ -24,8 +24,12 @@ from .base import (
|
|||
Wheel,
|
||||
)
|
||||
|
||||
__all__ = ["NAME", "Distribution", "Environment"]
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
NAME = "pkg_resources"
|
||||
|
||||
|
||||
class EntryPoint(NamedTuple):
|
||||
name: str
|
||||
|
@ -212,12 +216,16 @@ class Distribution(BaseDistribution):
|
|||
|
||||
def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
|
||||
if extras: # pkg_resources raises on invalid extras, so we sanitize.
|
||||
extras = frozenset(extras).intersection(self._dist.extras)
|
||||
extras = frozenset(pkg_resources.safe_extra(e) for e in extras)
|
||||
extras = extras.intersection(self._dist.extras)
|
||||
return self._dist.requires(extras)
|
||||
|
||||
def iter_provided_extras(self) -> Iterable[str]:
|
||||
return self._dist.extras
|
||||
|
||||
def is_extra_provided(self, extra: str) -> bool:
|
||||
return pkg_resources.safe_extra(extra) in self._dist.extras
|
||||
|
||||
|
||||
class Environment(BaseEnvironment):
|
||||
def __init__(self, ws: pkg_resources.WorkingSet) -> None:
|
||||
|
|
|
@ -3,10 +3,11 @@
|
|||
|
||||
import os
|
||||
from contextlib import contextmanager
|
||||
from typing import Generator, Optional
|
||||
from datetime import datetime
|
||||
from typing import BinaryIO, Generator, Optional, Union
|
||||
|
||||
from pip._vendor.cachecontrol.cache import BaseCache
|
||||
from pip._vendor.cachecontrol.caches import FileCache
|
||||
from pip._vendor.cachecontrol.cache import SeparateBodyBaseCache
|
||||
from pip._vendor.cachecontrol.caches import SeparateBodyFileCache
|
||||
from pip._vendor.requests.models import Response
|
||||
|
||||
from pip._internal.utils.filesystem import adjacent_tmp_file, replace
|
||||
|
@ -28,10 +29,22 @@ def suppressed_cache_errors() -> Generator[None, None, None]:
|
|||
pass
|
||||
|
||||
|
||||
class SafeFileCache(BaseCache):
|
||||
class SafeFileCache(SeparateBodyBaseCache):
|
||||
"""
|
||||
A file based cache which is safe to use even when the target directory may
|
||||
not be accessible or writable.
|
||||
|
||||
There is a race condition when two processes try to write and/or read the
|
||||
same entry at the same time, since each entry consists of two separate
|
||||
files (https://github.com/psf/cachecontrol/issues/324). We therefore have
|
||||
additional logic that makes sure that both files to be present before
|
||||
returning an entry; this fixes the read side of the race condition.
|
||||
|
||||
For the write side, we assume that the server will only ever return the
|
||||
same data for the same URL, which ought to be the case for files pip is
|
||||
downloading. PyPI does not have a mechanism to swap out a wheel for
|
||||
another wheel, for example. If this assumption is not true, the
|
||||
CacheControl issue will need to be fixed.
|
||||
"""
|
||||
|
||||
def __init__(self, directory: str) -> None:
|
||||
|
@ -43,27 +56,51 @@ class SafeFileCache(BaseCache):
|
|||
# From cachecontrol.caches.file_cache.FileCache._fn, brought into our
|
||||
# class for backwards-compatibility and to avoid using a non-public
|
||||
# method.
|
||||
hashed = FileCache.encode(name)
|
||||
hashed = SeparateBodyFileCache.encode(name)
|
||||
parts = list(hashed[:5]) + [hashed]
|
||||
return os.path.join(self.directory, *parts)
|
||||
|
||||
def get(self, key: str) -> Optional[bytes]:
|
||||
path = self._get_cache_path(key)
|
||||
# The cache entry is only valid if both metadata and body exist.
|
||||
metadata_path = self._get_cache_path(key)
|
||||
body_path = metadata_path + ".body"
|
||||
if not (os.path.exists(metadata_path) and os.path.exists(body_path)):
|
||||
return None
|
||||
with suppressed_cache_errors():
|
||||
with open(path, "rb") as f:
|
||||
with open(metadata_path, "rb") as f:
|
||||
return f.read()
|
||||
|
||||
def set(self, key: str, value: bytes, expires: Optional[int] = None) -> None:
|
||||
path = self._get_cache_path(key)
|
||||
def _write(self, path: str, data: bytes) -> None:
|
||||
with suppressed_cache_errors():
|
||||
ensure_dir(os.path.dirname(path))
|
||||
|
||||
with adjacent_tmp_file(path) as f:
|
||||
f.write(value)
|
||||
f.write(data)
|
||||
|
||||
replace(f.name, path)
|
||||
|
||||
def set(
|
||||
self, key: str, value: bytes, expires: Union[int, datetime, None] = None
|
||||
) -> None:
|
||||
path = self._get_cache_path(key)
|
||||
self._write(path, value)
|
||||
|
||||
def delete(self, key: str) -> None:
|
||||
path = self._get_cache_path(key)
|
||||
with suppressed_cache_errors():
|
||||
os.remove(path)
|
||||
with suppressed_cache_errors():
|
||||
os.remove(path + ".body")
|
||||
|
||||
def get_body(self, key: str) -> Optional[BinaryIO]:
|
||||
# The cache entry is only valid if both metadata and body exist.
|
||||
metadata_path = self._get_cache_path(key)
|
||||
body_path = metadata_path + ".body"
|
||||
if not (os.path.exists(metadata_path) and os.path.exists(body_path)):
|
||||
return None
|
||||
with suppressed_cache_errors():
|
||||
return open(body_path, "rb")
|
||||
|
||||
def set_body(self, key: str, body: bytes) -> None:
|
||||
path = self._get_cache_path(key) + ".body"
|
||||
self._write(path, body)
|
||||
|
|
|
@ -168,7 +168,7 @@ def warn_legacy_versions_and_specifiers(package_set: PackageSet) -> None:
|
|||
f"release a version with a conforming version number"
|
||||
),
|
||||
issue=12063,
|
||||
gone_in="23.3",
|
||||
gone_in="24.0",
|
||||
)
|
||||
for dep in package_details.dependencies:
|
||||
if any(isinstance(spec, LegacySpecifier) for spec in dep.specifier):
|
||||
|
@ -183,5 +183,5 @@ def warn_legacy_versions_and_specifiers(package_set: PackageSet) -> None:
|
|||
f"release a version with a conforming dependency specifiers"
|
||||
),
|
||||
issue=12063,
|
||||
gone_in="23.3",
|
||||
gone_in="24.0",
|
||||
)
|
||||
|
|
|
@ -47,6 +47,7 @@ from pip._internal.utils.misc import (
|
|||
display_path,
|
||||
hash_file,
|
||||
hide_url,
|
||||
redact_auth_from_requirement,
|
||||
)
|
||||
from pip._internal.utils.temp_dir import TempDirectory
|
||||
from pip._internal.utils.unpacking import unpack_file
|
||||
|
@ -277,7 +278,7 @@ class RequirementPreparer:
|
|||
information = str(display_path(req.link.file_path))
|
||||
else:
|
||||
message = "Collecting %s"
|
||||
information = str(req.req or req)
|
||||
information = redact_auth_from_requirement(req.req) if req.req else str(req)
|
||||
|
||||
# If we used req.req, inject requirement source if available (this
|
||||
# would already be included if we used req directly)
|
||||
|
|
|
@ -8,10 +8,11 @@ These are meant to be used elsewhere within pip to create instances of
|
|||
InstallRequirement.
|
||||
"""
|
||||
|
||||
import copy
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
from typing import Dict, List, Optional, Set, Tuple, Union
|
||||
from typing import Collection, Dict, List, Optional, Set, Tuple, Union
|
||||
|
||||
from pip._vendor.packaging.markers import Marker
|
||||
from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
|
||||
|
@ -57,6 +58,31 @@ def convert_extras(extras: Optional[str]) -> Set[str]:
|
|||
return get_requirement("placeholder" + extras.lower()).extras
|
||||
|
||||
|
||||
def _set_requirement_extras(req: Requirement, new_extras: Set[str]) -> Requirement:
|
||||
"""
|
||||
Returns a new requirement based on the given one, with the supplied extras. If the
|
||||
given requirement already has extras those are replaced (or dropped if no new extras
|
||||
are given).
|
||||
"""
|
||||
match: Optional[re.Match[str]] = re.fullmatch(
|
||||
# see https://peps.python.org/pep-0508/#complete-grammar
|
||||
r"([\w\t .-]+)(\[[^\]]*\])?(.*)",
|
||||
str(req),
|
||||
flags=re.ASCII,
|
||||
)
|
||||
# ireq.req is a valid requirement so the regex should always match
|
||||
assert (
|
||||
match is not None
|
||||
), f"regex match on requirement {req} failed, this should never happen"
|
||||
pre: Optional[str] = match.group(1)
|
||||
post: Optional[str] = match.group(3)
|
||||
assert (
|
||||
pre is not None and post is not None
|
||||
), f"regex group selection for requirement {req} failed, this should never happen"
|
||||
extras: str = "[%s]" % ",".join(sorted(new_extras)) if new_extras else ""
|
||||
return Requirement(f"{pre}{extras}{post}")
|
||||
|
||||
|
||||
def parse_editable(editable_req: str) -> Tuple[Optional[str], str, Set[str]]:
|
||||
"""Parses an editable requirement into:
|
||||
- a requirement name
|
||||
|
@ -504,3 +530,47 @@ def install_req_from_link_and_ireq(
|
|||
config_settings=ireq.config_settings,
|
||||
user_supplied=ireq.user_supplied,
|
||||
)
|
||||
|
||||
|
||||
def install_req_drop_extras(ireq: InstallRequirement) -> InstallRequirement:
|
||||
"""
|
||||
Creates a new InstallationRequirement using the given template but without
|
||||
any extras. Sets the original requirement as the new one's parent
|
||||
(comes_from).
|
||||
"""
|
||||
return InstallRequirement(
|
||||
req=(
|
||||
_set_requirement_extras(ireq.req, set()) if ireq.req is not None else None
|
||||
),
|
||||
comes_from=ireq,
|
||||
editable=ireq.editable,
|
||||
link=ireq.link,
|
||||
markers=ireq.markers,
|
||||
use_pep517=ireq.use_pep517,
|
||||
isolated=ireq.isolated,
|
||||
global_options=ireq.global_options,
|
||||
hash_options=ireq.hash_options,
|
||||
constraint=ireq.constraint,
|
||||
extras=[],
|
||||
config_settings=ireq.config_settings,
|
||||
user_supplied=ireq.user_supplied,
|
||||
permit_editable_wheels=ireq.permit_editable_wheels,
|
||||
)
|
||||
|
||||
|
||||
def install_req_extend_extras(
|
||||
ireq: InstallRequirement,
|
||||
extras: Collection[str],
|
||||
) -> InstallRequirement:
|
||||
"""
|
||||
Returns a copy of an installation requirement with some additional extras.
|
||||
Makes a shallow copy of the ireq object.
|
||||
"""
|
||||
result = copy.copy(ireq)
|
||||
result.extras = {*ireq.extras, *extras}
|
||||
result.req = (
|
||||
_set_requirement_extras(ireq.req, result.extras)
|
||||
if ireq.req is not None
|
||||
else None
|
||||
)
|
||||
return result
|
||||
|
|
|
@ -49,6 +49,7 @@ from pip._internal.utils.misc import (
|
|||
display_path,
|
||||
hide_url,
|
||||
is_installable_dir,
|
||||
redact_auth_from_requirement,
|
||||
redact_auth_from_url,
|
||||
)
|
||||
from pip._internal.utils.packaging import safe_extra
|
||||
|
@ -128,7 +129,7 @@ class InstallRequirement:
|
|||
if extras:
|
||||
self.extras = extras
|
||||
elif req:
|
||||
self.extras = {safe_extra(extra) for extra in req.extras}
|
||||
self.extras = req.extras
|
||||
else:
|
||||
self.extras = set()
|
||||
if markers is None and req:
|
||||
|
@ -188,7 +189,7 @@ class InstallRequirement:
|
|||
|
||||
def __str__(self) -> str:
|
||||
if self.req:
|
||||
s = str(self.req)
|
||||
s = redact_auth_from_requirement(self.req)
|
||||
if self.link:
|
||||
s += " from {}".format(redact_auth_from_url(self.link.url))
|
||||
elif self.link:
|
||||
|
@ -272,7 +273,12 @@ class InstallRequirement:
|
|||
extras_requested = ("",)
|
||||
if self.markers is not None:
|
||||
return any(
|
||||
self.markers.evaluate({"extra": extra}) for extra in extras_requested
|
||||
self.markers.evaluate({"extra": extra})
|
||||
# TODO: Remove these two variants when packaging is upgraded to
|
||||
# support the marker comparison logic specified in PEP 685.
|
||||
or self.markers.evaluate({"extra": safe_extra(extra)})
|
||||
or self.markers.evaluate({"extra": canonicalize_name(extra)})
|
||||
for extra in extras_requested
|
||||
)
|
||||
else:
|
||||
return True
|
||||
|
@ -509,7 +515,7 @@ class InstallRequirement:
|
|||
"to use --use-pep517 or add a "
|
||||
"pyproject.toml file to the project"
|
||||
),
|
||||
gone_in="23.3",
|
||||
gone_in="24.0",
|
||||
)
|
||||
self.use_pep517 = False
|
||||
return
|
||||
|
@ -899,7 +905,7 @@ def check_legacy_setup_py_options(
|
|||
reason="--build-option and --global-option are deprecated.",
|
||||
issue=11859,
|
||||
replacement="to use --config-settings",
|
||||
gone_in="23.3",
|
||||
gone_in="24.0",
|
||||
)
|
||||
logger.warning(
|
||||
"Implying --no-binary=:all: due to the presence of "
|
||||
|
|
|
@ -99,7 +99,7 @@ class RequirementSet:
|
|||
"or contact the package author to fix the version number"
|
||||
),
|
||||
issue=12063,
|
||||
gone_in="23.3",
|
||||
gone_in="24.0",
|
||||
)
|
||||
for dep in req.get_dist().iter_dependencies():
|
||||
if any(isinstance(spec, LegacySpecifier) for spec in dep.specifier):
|
||||
|
@ -115,5 +115,5 @@ class RequirementSet:
|
|||
"or contact the package author to fix the version number"
|
||||
),
|
||||
issue=12063,
|
||||
gone_in="23.3",
|
||||
gone_in="24.0",
|
||||
)
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
from typing import FrozenSet, Iterable, Optional, Tuple, Union
|
||||
|
||||
from pip._vendor.packaging.specifiers import SpecifierSet
|
||||
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
|
||||
from pip._vendor.packaging.utils import NormalizedName
|
||||
from pip._vendor.packaging.version import LegacyVersion, Version
|
||||
|
||||
from pip._internal.models.link import Link, links_equivalent
|
||||
|
@ -12,11 +12,11 @@ CandidateLookup = Tuple[Optional["Candidate"], Optional[InstallRequirement]]
|
|||
CandidateVersion = Union[LegacyVersion, Version]
|
||||
|
||||
|
||||
def format_name(project: str, extras: FrozenSet[str]) -> str:
|
||||
def format_name(project: NormalizedName, extras: FrozenSet[NormalizedName]) -> str:
|
||||
if not extras:
|
||||
return project
|
||||
canonical_extras = sorted(canonicalize_name(e) for e in extras)
|
||||
return "{}[{}]".format(project, ",".join(canonical_extras))
|
||||
extras_expr = ",".join(sorted(extras))
|
||||
return f"{project}[{extras_expr}]"
|
||||
|
||||
|
||||
class Constraint:
|
||||
|
|
|
@ -240,7 +240,7 @@ class _InstallRequirementBackedCandidate(Candidate):
|
|||
def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
|
||||
requires = self.dist.iter_dependencies() if with_requires else ()
|
||||
for r in requires:
|
||||
yield self._factory.make_requirement_from_spec(str(r), self._ireq)
|
||||
yield from self._factory.make_requirements_from_spec(str(r), self._ireq)
|
||||
yield self._factory.make_requires_python_requirement(self.dist.requires_python)
|
||||
|
||||
def get_install_requirement(self) -> Optional[InstallRequirement]:
|
||||
|
@ -392,7 +392,7 @@ class AlreadyInstalledCandidate(Candidate):
|
|||
if not with_requires:
|
||||
return
|
||||
for r in self.dist.iter_dependencies():
|
||||
yield self._factory.make_requirement_from_spec(str(r), self._ireq)
|
||||
yield from self._factory.make_requirements_from_spec(str(r), self._ireq)
|
||||
|
||||
def get_install_requirement(self) -> Optional[InstallRequirement]:
|
||||
return None
|
||||
|
@ -427,9 +427,28 @@ class ExtrasCandidate(Candidate):
|
|||
self,
|
||||
base: BaseCandidate,
|
||||
extras: FrozenSet[str],
|
||||
*,
|
||||
comes_from: Optional[InstallRequirement] = None,
|
||||
) -> None:
|
||||
"""
|
||||
:param comes_from: the InstallRequirement that led to this candidate if it
|
||||
differs from the base's InstallRequirement. This will often be the
|
||||
case in the sense that this candidate's requirement has the extras
|
||||
while the base's does not. Unlike the InstallRequirement backed
|
||||
candidates, this requirement is used solely for reporting purposes,
|
||||
it does not do any leg work.
|
||||
"""
|
||||
self.base = base
|
||||
self.extras = extras
|
||||
self.extras = frozenset(canonicalize_name(e) for e in extras)
|
||||
# If any extras are requested in their non-normalized forms, keep track
|
||||
# of their raw values. This is needed when we look up dependencies
|
||||
# since PEP 685 has not been implemented for marker-matching, and using
|
||||
# the non-normalized extra for lookup ensures the user can select a
|
||||
# non-normalized extra in a package with its non-normalized form.
|
||||
# TODO: Remove this attribute when packaging is upgraded to support the
|
||||
# marker comparison logic specified in PEP 685.
|
||||
self._unnormalized_extras = extras.difference(self.extras)
|
||||
self._comes_from = comes_from if comes_from is not None else self.base._ireq
|
||||
|
||||
def __str__(self) -> str:
|
||||
name, rest = str(self.base).split(" ", 1)
|
||||
|
@ -480,6 +499,50 @@ class ExtrasCandidate(Candidate):
|
|||
def source_link(self) -> Optional[Link]:
|
||||
return self.base.source_link
|
||||
|
||||
def _warn_invalid_extras(
|
||||
self,
|
||||
requested: FrozenSet[str],
|
||||
valid: FrozenSet[str],
|
||||
) -> None:
|
||||
"""Emit warnings for invalid extras being requested.
|
||||
|
||||
This emits a warning for each requested extra that is not in the
|
||||
candidate's ``Provides-Extra`` list.
|
||||
"""
|
||||
invalid_extras_to_warn = frozenset(
|
||||
extra
|
||||
for extra in requested
|
||||
if extra not in valid
|
||||
# If an extra is requested in an unnormalized form, skip warning
|
||||
# about the normalized form being missing.
|
||||
and extra in self.extras
|
||||
)
|
||||
if not invalid_extras_to_warn:
|
||||
return
|
||||
for extra in sorted(invalid_extras_to_warn):
|
||||
logger.warning(
|
||||
"%s %s does not provide the extra '%s'",
|
||||
self.base.name,
|
||||
self.version,
|
||||
extra,
|
||||
)
|
||||
|
||||
def _calculate_valid_requested_extras(self) -> FrozenSet[str]:
|
||||
"""Get a list of valid extras requested by this candidate.
|
||||
|
||||
The user (or upstream dependant) may have specified extras that the
|
||||
candidate doesn't support. Any unsupported extras are dropped, and each
|
||||
cause a warning to be logged here.
|
||||
"""
|
||||
requested_extras = self.extras.union(self._unnormalized_extras)
|
||||
valid_extras = frozenset(
|
||||
extra
|
||||
for extra in requested_extras
|
||||
if self.base.dist.is_extra_provided(extra)
|
||||
)
|
||||
self._warn_invalid_extras(requested_extras, valid_extras)
|
||||
return valid_extras
|
||||
|
||||
def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
|
||||
factory = self.base._factory
|
||||
|
||||
|
@ -489,24 +552,13 @@ class ExtrasCandidate(Candidate):
|
|||
if not with_requires:
|
||||
return
|
||||
|
||||
# The user may have specified extras that the candidate doesn't
|
||||
# support. We ignore any unsupported extras here.
|
||||
valid_extras = self.extras.intersection(self.base.dist.iter_provided_extras())
|
||||
invalid_extras = self.extras.difference(self.base.dist.iter_provided_extras())
|
||||
for extra in sorted(invalid_extras):
|
||||
logger.warning(
|
||||
"%s %s does not provide the extra '%s'",
|
||||
self.base.name,
|
||||
self.version,
|
||||
extra,
|
||||
)
|
||||
|
||||
valid_extras = self._calculate_valid_requested_extras()
|
||||
for r in self.base.dist.iter_dependencies(valid_extras):
|
||||
requirement = factory.make_requirement_from_spec(
|
||||
str(r), self.base._ireq, valid_extras
|
||||
yield from factory.make_requirements_from_spec(
|
||||
str(r),
|
||||
self._comes_from,
|
||||
valid_extras,
|
||||
)
|
||||
if requirement:
|
||||
yield requirement
|
||||
|
||||
def get_install_requirement(self) -> Optional[InstallRequirement]:
|
||||
# We don't return anything here, because we always
|
||||
|
|
|
@ -62,6 +62,7 @@ from .requirements import (
|
|||
ExplicitRequirement,
|
||||
RequiresPythonRequirement,
|
||||
SpecifierRequirement,
|
||||
SpecifierWithoutExtrasRequirement,
|
||||
UnsatisfiableRequirement,
|
||||
)
|
||||
|
||||
|
@ -112,7 +113,7 @@ class Factory:
|
|||
self._editable_candidate_cache: Cache[EditableCandidate] = {}
|
||||
self._installed_candidate_cache: Dict[str, AlreadyInstalledCandidate] = {}
|
||||
self._extras_candidate_cache: Dict[
|
||||
Tuple[int, FrozenSet[str]], ExtrasCandidate
|
||||
Tuple[int, FrozenSet[NormalizedName]], ExtrasCandidate
|
||||
] = {}
|
||||
|
||||
if not ignore_installed:
|
||||
|
@ -138,13 +139,17 @@ class Factory:
|
|||
raise UnsupportedWheel(msg)
|
||||
|
||||
def _make_extras_candidate(
|
||||
self, base: BaseCandidate, extras: FrozenSet[str]
|
||||
self,
|
||||
base: BaseCandidate,
|
||||
extras: FrozenSet[str],
|
||||
*,
|
||||
comes_from: Optional[InstallRequirement] = None,
|
||||
) -> ExtrasCandidate:
|
||||
cache_key = (id(base), extras)
|
||||
cache_key = (id(base), frozenset(canonicalize_name(e) for e in extras))
|
||||
try:
|
||||
candidate = self._extras_candidate_cache[cache_key]
|
||||
except KeyError:
|
||||
candidate = ExtrasCandidate(base, extras)
|
||||
candidate = ExtrasCandidate(base, extras, comes_from=comes_from)
|
||||
self._extras_candidate_cache[cache_key] = candidate
|
||||
return candidate
|
||||
|
||||
|
@ -161,7 +166,7 @@ class Factory:
|
|||
self._installed_candidate_cache[dist.canonical_name] = base
|
||||
if not extras:
|
||||
return base
|
||||
return self._make_extras_candidate(base, extras)
|
||||
return self._make_extras_candidate(base, extras, comes_from=template)
|
||||
|
||||
def _make_candidate_from_link(
|
||||
self,
|
||||
|
@ -223,7 +228,7 @@ class Factory:
|
|||
|
||||
if not extras:
|
||||
return base
|
||||
return self._make_extras_candidate(base, extras)
|
||||
return self._make_extras_candidate(base, extras, comes_from=template)
|
||||
|
||||
def _iter_found_candidates(
|
||||
self,
|
||||
|
@ -385,16 +390,21 @@ class Factory:
|
|||
if ireq is not None:
|
||||
ireqs.append(ireq)
|
||||
|
||||
# If the current identifier contains extras, add explicit candidates
|
||||
# from entries from extra-less identifier.
|
||||
# If the current identifier contains extras, add requires and explicit
|
||||
# candidates from entries from extra-less identifier.
|
||||
with contextlib.suppress(InvalidRequirement):
|
||||
parsed_requirement = get_requirement(identifier)
|
||||
if parsed_requirement.name != identifier:
|
||||
explicit_candidates.update(
|
||||
self._iter_explicit_candidates_from_base(
|
||||
requirements.get(parsed_requirement.name, ()),
|
||||
frozenset(parsed_requirement.extras),
|
||||
),
|
||||
)
|
||||
for req in requirements.get(parsed_requirement.name, []):
|
||||
_, ireq = req.get_candidate_lookup()
|
||||
if ireq is not None:
|
||||
ireqs.append(ireq)
|
||||
|
||||
# Add explicit candidates from constraints. We only do this if there are
|
||||
# known ireqs, which represent requirements not already explicit. If
|
||||
|
@ -437,18 +447,29 @@ class Factory:
|
|||
and all(req.is_satisfied_by(c) for req in requirements[identifier])
|
||||
)
|
||||
|
||||
def _make_requirement_from_install_req(
|
||||
def _make_requirements_from_install_req(
|
||||
self, ireq: InstallRequirement, requested_extras: Iterable[str]
|
||||
) -> Optional[Requirement]:
|
||||
) -> Iterator[Requirement]:
|
||||
"""
|
||||
Returns requirement objects associated with the given InstallRequirement. In
|
||||
most cases this will be a single object but the following special cases exist:
|
||||
- the InstallRequirement has markers that do not apply -> result is empty
|
||||
- the InstallRequirement has both a constraint and extras -> result is split
|
||||
in two requirement objects: one with the constraint and one with the
|
||||
extra. This allows centralized constraint handling for the base,
|
||||
resulting in fewer candidate rejections.
|
||||
"""
|
||||
if not ireq.match_markers(requested_extras):
|
||||
logger.info(
|
||||
"Ignoring %s: markers '%s' don't match your environment",
|
||||
ireq.name,
|
||||
ireq.markers,
|
||||
)
|
||||
return None
|
||||
if not ireq.link:
|
||||
return SpecifierRequirement(ireq)
|
||||
elif not ireq.link:
|
||||
if ireq.extras and ireq.req is not None and ireq.req.specifier:
|
||||
yield SpecifierWithoutExtrasRequirement(ireq)
|
||||
yield SpecifierRequirement(ireq)
|
||||
else:
|
||||
self._fail_if_link_is_unsupported_wheel(ireq.link)
|
||||
cand = self._make_candidate_from_link(
|
||||
ireq.link,
|
||||
|
@ -466,8 +487,9 @@ class Factory:
|
|||
# ResolutionImpossible eventually.
|
||||
if not ireq.name:
|
||||
raise self._build_failures[ireq.link]
|
||||
return UnsatisfiableRequirement(canonicalize_name(ireq.name))
|
||||
return self.make_requirement_from_candidate(cand)
|
||||
yield UnsatisfiableRequirement(canonicalize_name(ireq.name))
|
||||
else:
|
||||
yield self.make_requirement_from_candidate(cand)
|
||||
|
||||
def collect_root_requirements(
|
||||
self, root_ireqs: List[InstallRequirement]
|
||||
|
@ -488,15 +510,27 @@ class Factory:
|
|||
else:
|
||||
collected.constraints[name] = Constraint.from_ireq(ireq)
|
||||
else:
|
||||
req = self._make_requirement_from_install_req(
|
||||
reqs = list(
|
||||
self._make_requirements_from_install_req(
|
||||
ireq,
|
||||
requested_extras=(),
|
||||
)
|
||||
if req is None:
|
||||
)
|
||||
if not reqs:
|
||||
continue
|
||||
if ireq.user_supplied and req.name not in collected.user_requested:
|
||||
collected.user_requested[req.name] = i
|
||||
collected.requirements.append(req)
|
||||
template = reqs[0]
|
||||
if ireq.user_supplied and template.name not in collected.user_requested:
|
||||
collected.user_requested[template.name] = i
|
||||
collected.requirements.extend(reqs)
|
||||
# Put requirements with extras at the end of the root requires. This does not
|
||||
# affect resolvelib's picking preference but it does affect its initial criteria
|
||||
# population: by putting extras at the end we enable the candidate finder to
|
||||
# present resolvelib with a smaller set of candidates to resolvelib, already
|
||||
# taking into account any non-transient constraints on the associated base. This
|
||||
# means resolvelib will have fewer candidates to visit and reject.
|
||||
# Python's list sort is stable, meaning relative order is kept for objects with
|
||||
# the same key.
|
||||
collected.requirements.sort(key=lambda r: r.name != r.project_name)
|
||||
return collected
|
||||
|
||||
def make_requirement_from_candidate(
|
||||
|
@ -504,14 +538,23 @@ class Factory:
|
|||
) -> ExplicitRequirement:
|
||||
return ExplicitRequirement(candidate)
|
||||
|
||||
def make_requirement_from_spec(
|
||||
def make_requirements_from_spec(
|
||||
self,
|
||||
specifier: str,
|
||||
comes_from: Optional[InstallRequirement],
|
||||
requested_extras: Iterable[str] = (),
|
||||
) -> Optional[Requirement]:
|
||||
) -> Iterator[Requirement]:
|
||||
"""
|
||||
Returns requirement objects associated with the given specifier. In most cases
|
||||
this will be a single object but the following special cases exist:
|
||||
- the specifier has markers that do not apply -> result is empty
|
||||
- the specifier has both a constraint and extras -> result is split
|
||||
in two requirement objects: one with the constraint and one with the
|
||||
extra. This allows centralized constraint handling for the base,
|
||||
resulting in fewer candidate rejections.
|
||||
"""
|
||||
ireq = self._make_install_req_from_spec(specifier, comes_from)
|
||||
return self._make_requirement_from_install_req(ireq, requested_extras)
|
||||
return self._make_requirements_from_install_req(ireq, requested_extras)
|
||||
|
||||
def make_requires_python_requirement(
|
||||
self,
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
from pip._vendor.packaging.specifiers import SpecifierSet
|
||||
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
|
||||
|
||||
from pip._internal.req.constructors import install_req_drop_extras
|
||||
from pip._internal.req.req_install import InstallRequirement
|
||||
|
||||
from .base import Candidate, CandidateLookup, Requirement, format_name
|
||||
|
@ -43,7 +44,7 @@ class SpecifierRequirement(Requirement):
|
|||
def __init__(self, ireq: InstallRequirement) -> None:
|
||||
assert ireq.link is None, "This is a link, not a specifier"
|
||||
self._ireq = ireq
|
||||
self._extras = frozenset(ireq.extras)
|
||||
self._extras = frozenset(canonicalize_name(e) for e in self._ireq.extras)
|
||||
|
||||
def __str__(self) -> str:
|
||||
return str(self._ireq.req)
|
||||
|
@ -92,6 +93,18 @@ class SpecifierRequirement(Requirement):
|
|||
return spec.contains(candidate.version, prereleases=True)
|
||||
|
||||
|
||||
class SpecifierWithoutExtrasRequirement(SpecifierRequirement):
|
||||
"""
|
||||
Requirement backed by an install requirement on a base package.
|
||||
Trims extras from its install requirement if there are any.
|
||||
"""
|
||||
|
||||
def __init__(self, ireq: InstallRequirement) -> None:
|
||||
assert ireq.link is None, "This is a link, not a specifier"
|
||||
self._ireq = install_req_drop_extras(ireq)
|
||||
self._extras = frozenset(canonicalize_name(e) for e in self._ireq.extras)
|
||||
|
||||
|
||||
class RequiresPythonRequirement(Requirement):
|
||||
"""A requirement representing Requires-Python metadata."""
|
||||
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
import contextlib
|
||||
import functools
|
||||
import logging
|
||||
import os
|
||||
|
@ -11,6 +12,7 @@ from pip._vendor.resolvelib.structs import DirectedGraph
|
|||
from pip._internal.cache import WheelCache
|
||||
from pip._internal.index.package_finder import PackageFinder
|
||||
from pip._internal.operations.prepare import RequirementPreparer
|
||||
from pip._internal.req.constructors import install_req_extend_extras
|
||||
from pip._internal.req.req_install import InstallRequirement
|
||||
from pip._internal.req.req_set import RequirementSet
|
||||
from pip._internal.resolution.base import BaseResolver, InstallRequirementProvider
|
||||
|
@ -19,6 +21,7 @@ from pip._internal.resolution.resolvelib.reporter import (
|
|||
PipDebuggingReporter,
|
||||
PipReporter,
|
||||
)
|
||||
from pip._internal.utils.packaging import get_requirement
|
||||
|
||||
from .base import Candidate, Requirement
|
||||
from .factory import Factory
|
||||
|
@ -101,9 +104,24 @@ class Resolver(BaseResolver):
|
|||
raise error from e
|
||||
|
||||
req_set = RequirementSet(check_supported_wheels=check_supported_wheels)
|
||||
for candidate in result.mapping.values():
|
||||
# process candidates with extras last to ensure their base equivalent is
|
||||
# already in the req_set if appropriate.
|
||||
# Python's sort is stable so using a binary key function keeps relative order
|
||||
# within both subsets.
|
||||
for candidate in sorted(
|
||||
result.mapping.values(), key=lambda c: c.name != c.project_name
|
||||
):
|
||||
ireq = candidate.get_install_requirement()
|
||||
if ireq is None:
|
||||
if candidate.name != candidate.project_name:
|
||||
# extend existing req's extras
|
||||
with contextlib.suppress(KeyError):
|
||||
req = req_set.get_requirement(candidate.project_name)
|
||||
req_set.add_named_requirement(
|
||||
install_req_extend_extras(
|
||||
req, get_requirement(candidate.name).extras
|
||||
)
|
||||
)
|
||||
continue
|
||||
|
||||
# Check if there is already an installation under the same name,
|
||||
|
|
|
@ -39,6 +39,15 @@ def _get_statefile_name(key: str) -> str:
|
|||
return name
|
||||
|
||||
|
||||
def _convert_date(isodate: str) -> datetime.datetime:
|
||||
"""Convert an ISO format string to a date.
|
||||
|
||||
Handles the format 2020-01-22T14:24:01Z (trailing Z)
|
||||
which is not supported by older versions of fromisoformat.
|
||||
"""
|
||||
return datetime.datetime.fromisoformat(isodate.replace("Z", "+00:00"))
|
||||
|
||||
|
||||
class SelfCheckState:
|
||||
def __init__(self, cache_dir: str) -> None:
|
||||
self._state: Dict[str, Any] = {}
|
||||
|
@ -73,7 +82,7 @@ class SelfCheckState:
|
|||
return None
|
||||
|
||||
# Determine if we need to refresh the state
|
||||
last_check = datetime.datetime.fromisoformat(self._state["last_check"])
|
||||
last_check = _convert_date(self._state["last_check"])
|
||||
time_since_last_check = current_time - last_check
|
||||
if time_since_last_check > _WEEK:
|
||||
return None
|
||||
|
@ -233,7 +242,7 @@ def pip_self_version_check(session: PipSession, options: optparse.Values) -> Non
|
|||
),
|
||||
)
|
||||
if upgrade_prompt is not None:
|
||||
logger.warning("[present-rich] %s", upgrade_prompt)
|
||||
logger.warning("%s", upgrade_prompt, extra={"rich": True})
|
||||
except Exception:
|
||||
logger.warning("There was an error checking the latest version of pip.")
|
||||
logger.debug("See below for error", exc_info=True)
|
||||
|
|
|
@ -155,8 +155,8 @@ class RichPipStreamHandler(RichHandler):
|
|||
|
||||
# If we are given a diagnostic error to present, present it with indentation.
|
||||
assert isinstance(record.args, tuple)
|
||||
if record.msg == "[present-rich] %s" and len(record.args) == 1:
|
||||
rich_renderable = record.args[0]
|
||||
if getattr(record, "rich", False):
|
||||
(rich_renderable,) = record.args
|
||||
assert isinstance(
|
||||
rich_renderable, (ConsoleRenderable, RichCast, str)
|
||||
), f"{rich_renderable} is not rich-console-renderable"
|
||||
|
|
|
@ -35,6 +35,7 @@ from typing import (
|
|||
cast,
|
||||
)
|
||||
|
||||
from pip._vendor.packaging.requirements import Requirement
|
||||
from pip._vendor.pyproject_hooks import BuildBackendHookCaller
|
||||
from pip._vendor.tenacity import retry, stop_after_delay, wait_fixed
|
||||
|
||||
|
@ -578,6 +579,13 @@ def redact_auth_from_url(url: str) -> str:
|
|||
return _transform_url(url, _redact_netloc)[0]
|
||||
|
||||
|
||||
def redact_auth_from_requirement(req: Requirement) -> str:
|
||||
"""Replace the password in a given requirement url with ****."""
|
||||
if not req.url:
|
||||
return str(req)
|
||||
return str(req).replace(req.url, redact_auth_from_url(req.url))
|
||||
|
||||
|
||||
class HiddenText:
|
||||
def __init__(self, secret: str, redacted: str) -> None:
|
||||
self.secret = secret
|
||||
|
|
|
@ -209,7 +209,7 @@ def call_subprocess(
|
|||
output_lines=all_output if not showing_subprocess else None,
|
||||
)
|
||||
if log_failed_cmd:
|
||||
subprocess_logger.error("[present-rich] %s", error)
|
||||
subprocess_logger.error("%s", error, extra={"rich": True})
|
||||
subprocess_logger.verbose(
|
||||
"[bold magenta]full command[/]: [blue]%s[/]",
|
||||
escape(format_command_args(cmd)),
|
||||
|
|
|
@ -6,9 +6,9 @@ import tempfile
|
|||
import traceback
|
||||
from contextlib import ExitStack, contextmanager
|
||||
from pathlib import Path
|
||||
from types import FunctionType
|
||||
from typing import (
|
||||
Any,
|
||||
Callable,
|
||||
Dict,
|
||||
Generator,
|
||||
List,
|
||||
|
@ -187,7 +187,7 @@ class TempDirectory:
|
|||
errors: List[BaseException] = []
|
||||
|
||||
def onerror(
|
||||
func: FunctionType,
|
||||
func: Callable[..., Any],
|
||||
path: Path,
|
||||
exc_val: BaseException,
|
||||
) -> None:
|
||||
|
|
|
@ -101,7 +101,7 @@ class Git(VersionControl):
|
|||
if not match:
|
||||
logger.warning("Can't parse git version: %s", version)
|
||||
return ()
|
||||
return tuple(int(c) for c in match.groups())
|
||||
return (int(match.group(1)), int(match.group(2)))
|
||||
|
||||
@classmethod
|
||||
def get_current_branch(cls, location: str) -> Optional[str]:
|
||||
|
|
|
@ -31,7 +31,7 @@ class Mercurial(VersionControl):
|
|||
|
||||
@staticmethod
|
||||
def get_base_rev_args(rev: str) -> List[str]:
|
||||
return ["-r", rev]
|
||||
return [f"-r={rev}"]
|
||||
|
||||
def fetch_new(
|
||||
self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int
|
||||
|
|
|
@ -117,4 +117,5 @@ if DEBUNDLED:
|
|||
vendored("rich.traceback")
|
||||
vendored("tenacity")
|
||||
vendored("tomli")
|
||||
vendored("truststore")
|
||||
vendored("urllib3")
|
||||
|
|
|
@ -1 +0,0 @@
|
|||
from cachecontrol import *
|
|
@ -8,11 +8,21 @@ Make it easy to import from cachecontrol without long namespaces.
|
|||
"""
|
||||
__author__ = "Eric Larson"
|
||||
__email__ = "eric@ionrock.org"
|
||||
__version__ = "0.12.11"
|
||||
__version__ = "0.13.1"
|
||||
|
||||
from .wrapper import CacheControl
|
||||
from .adapter import CacheControlAdapter
|
||||
from .controller import CacheController
|
||||
from pip._vendor.cachecontrol.adapter import CacheControlAdapter
|
||||
from pip._vendor.cachecontrol.controller import CacheController
|
||||
from pip._vendor.cachecontrol.wrapper import CacheControl
|
||||
|
||||
__all__ = [
|
||||
"__author__",
|
||||
"__email__",
|
||||
"__version__",
|
||||
"CacheControlAdapter",
|
||||
"CacheController",
|
||||
"CacheControl",
|
||||
]
|
||||
|
||||
import logging
|
||||
|
||||
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
||||
|
|
|
@ -1,8 +1,11 @@
|
|||
# SPDX-FileCopyrightText: 2015 Eric Larson
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from argparse import ArgumentParser
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from pip._vendor import requests
|
||||
|
||||
|
@ -10,16 +13,19 @@ from pip._vendor.cachecontrol.adapter import CacheControlAdapter
|
|||
from pip._vendor.cachecontrol.cache import DictCache
|
||||
from pip._vendor.cachecontrol.controller import logger
|
||||
|
||||
from argparse import ArgumentParser
|
||||
if TYPE_CHECKING:
|
||||
from argparse import Namespace
|
||||
|
||||
from pip._vendor.cachecontrol.controller import CacheController
|
||||
|
||||
|
||||
def setup_logging():
|
||||
def setup_logging() -> None:
|
||||
logger.setLevel(logging.DEBUG)
|
||||
handler = logging.StreamHandler()
|
||||
logger.addHandler(handler)
|
||||
|
||||
|
||||
def get_session():
|
||||
def get_session() -> requests.Session:
|
||||
adapter = CacheControlAdapter(
|
||||
DictCache(), cache_etags=True, serializer=None, heuristic=None
|
||||
)
|
||||
|
@ -27,17 +33,17 @@ def get_session():
|
|||
sess.mount("http://", adapter)
|
||||
sess.mount("https://", adapter)
|
||||
|
||||
sess.cache_controller = adapter.controller
|
||||
sess.cache_controller = adapter.controller # type: ignore[attr-defined]
|
||||
return sess
|
||||
|
||||
|
||||
def get_args():
|
||||
def get_args() -> Namespace:
|
||||
parser = ArgumentParser()
|
||||
parser.add_argument("url", help="The URL to try and cache")
|
||||
return parser.parse_args()
|
||||
|
||||
|
||||
def main(args=None):
|
||||
def main() -> None:
|
||||
args = get_args()
|
||||
sess = get_session()
|
||||
|
||||
|
@ -48,10 +54,13 @@ def main(args=None):
|
|||
setup_logging()
|
||||
|
||||
# try setting the cache
|
||||
sess.cache_controller.cache_response(resp.request, resp.raw)
|
||||
cache_controller: CacheController = (
|
||||
sess.cache_controller # type: ignore[attr-defined]
|
||||
)
|
||||
cache_controller.cache_response(resp.request, resp.raw)
|
||||
|
||||
# Now try to get it
|
||||
if sess.cache_controller.cached_request(resp.request):
|
||||
if cache_controller.cached_request(resp.request):
|
||||
print("Cached!")
|
||||
else:
|
||||
print("Not cached :(")
|
||||
|
|
|
@ -1,16 +1,26 @@
|
|||
# SPDX-FileCopyrightText: 2015 Eric Larson
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
from __future__ import annotations
|
||||
|
||||
import types
|
||||
import functools
|
||||
import types
|
||||
import zlib
|
||||
from typing import TYPE_CHECKING, Any, Collection, Mapping
|
||||
|
||||
from pip._vendor.requests.adapters import HTTPAdapter
|
||||
|
||||
from .controller import CacheController, PERMANENT_REDIRECT_STATUSES
|
||||
from .cache import DictCache
|
||||
from .filewrapper import CallbackFileWrapper
|
||||
from pip._vendor.cachecontrol.cache import DictCache
|
||||
from pip._vendor.cachecontrol.controller import PERMANENT_REDIRECT_STATUSES, CacheController
|
||||
from pip._vendor.cachecontrol.filewrapper import CallbackFileWrapper
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pip._vendor.requests import PreparedRequest, Response
|
||||
from pip._vendor.urllib3 import HTTPResponse
|
||||
|
||||
from pip._vendor.cachecontrol.cache import BaseCache
|
||||
from pip._vendor.cachecontrol.heuristics import BaseHeuristic
|
||||
from pip._vendor.cachecontrol.serialize import Serializer
|
||||
|
||||
|
||||
class CacheControlAdapter(HTTPAdapter):
|
||||
|
@ -18,16 +28,16 @@ class CacheControlAdapter(HTTPAdapter):
|
|||
|
||||
def __init__(
|
||||
self,
|
||||
cache=None,
|
||||
cache_etags=True,
|
||||
controller_class=None,
|
||||
serializer=None,
|
||||
heuristic=None,
|
||||
cacheable_methods=None,
|
||||
*args,
|
||||
**kw
|
||||
):
|
||||
super(CacheControlAdapter, self).__init__(*args, **kw)
|
||||
cache: BaseCache | None = None,
|
||||
cache_etags: bool = True,
|
||||
controller_class: type[CacheController] | None = None,
|
||||
serializer: Serializer | None = None,
|
||||
heuristic: BaseHeuristic | None = None,
|
||||
cacheable_methods: Collection[str] | None = None,
|
||||
*args: Any,
|
||||
**kw: Any,
|
||||
) -> None:
|
||||
super().__init__(*args, **kw)
|
||||
self.cache = DictCache() if cache is None else cache
|
||||
self.heuristic = heuristic
|
||||
self.cacheable_methods = cacheable_methods or ("GET",)
|
||||
|
@ -37,7 +47,16 @@ class CacheControlAdapter(HTTPAdapter):
|
|||
self.cache, cache_etags=cache_etags, serializer=serializer
|
||||
)
|
||||
|
||||
def send(self, request, cacheable_methods=None, **kw):
|
||||
def send(
|
||||
self,
|
||||
request: PreparedRequest,
|
||||
stream: bool = False,
|
||||
timeout: None | float | tuple[float, float] | tuple[float, None] = None,
|
||||
verify: bool | str = True,
|
||||
cert: (None | bytes | str | tuple[bytes | str, bytes | str]) = None,
|
||||
proxies: Mapping[str, str] | None = None,
|
||||
cacheable_methods: Collection[str] | None = None,
|
||||
) -> Response:
|
||||
"""
|
||||
Send a request. Use the request information to see if it
|
||||
exists in the cache and cache the response if we need to and can.
|
||||
|
@ -54,13 +73,17 @@ class CacheControlAdapter(HTTPAdapter):
|
|||
# check for etags and add headers if appropriate
|
||||
request.headers.update(self.controller.conditional_headers(request))
|
||||
|
||||
resp = super(CacheControlAdapter, self).send(request, **kw)
|
||||
resp = super().send(request, stream, timeout, verify, cert, proxies)
|
||||
|
||||
return resp
|
||||
|
||||
def build_response(
|
||||
self, request, response, from_cache=False, cacheable_methods=None
|
||||
):
|
||||
self,
|
||||
request: PreparedRequest,
|
||||
response: HTTPResponse,
|
||||
from_cache: bool = False,
|
||||
cacheable_methods: Collection[str] | None = None,
|
||||
) -> Response:
|
||||
"""
|
||||
Build a response by making a request or using the cache.
|
||||
|
||||
|
@ -102,36 +125,37 @@ class CacheControlAdapter(HTTPAdapter):
|
|||
else:
|
||||
# Wrap the response file with a wrapper that will cache the
|
||||
# response when the stream has been consumed.
|
||||
response._fp = CallbackFileWrapper(
|
||||
response._fp,
|
||||
response._fp = CallbackFileWrapper( # type: ignore[attr-defined]
|
||||
response._fp, # type: ignore[attr-defined]
|
||||
functools.partial(
|
||||
self.controller.cache_response, request, response
|
||||
),
|
||||
)
|
||||
if response.chunked:
|
||||
super_update_chunk_length = response._update_chunk_length
|
||||
super_update_chunk_length = response._update_chunk_length # type: ignore[attr-defined]
|
||||
|
||||
def _update_chunk_length(self):
|
||||
def _update_chunk_length(self: HTTPResponse) -> None:
|
||||
super_update_chunk_length()
|
||||
if self.chunk_left == 0:
|
||||
self._fp._close()
|
||||
self._fp._close() # type: ignore[attr-defined]
|
||||
|
||||
response._update_chunk_length = types.MethodType(
|
||||
response._update_chunk_length = types.MethodType( # type: ignore[attr-defined]
|
||||
_update_chunk_length, response
|
||||
)
|
||||
|
||||
resp = super(CacheControlAdapter, self).build_response(request, response)
|
||||
resp: Response = super().build_response(request, response) # type: ignore[no-untyped-call]
|
||||
|
||||
# See if we should invalidate the cache.
|
||||
if request.method in self.invalidating_methods and resp.ok:
|
||||
assert request.url is not None
|
||||
cache_url = self.controller.cache_url(request.url)
|
||||
self.cache.delete(cache_url)
|
||||
|
||||
# Give the request a from_cache attr to let people use it
|
||||
resp.from_cache = from_cache
|
||||
resp.from_cache = from_cache # type: ignore[attr-defined]
|
||||
|
||||
return resp
|
||||
|
||||
def close(self):
|
||||
def close(self) -> None:
|
||||
self.cache.close()
|
||||
super(CacheControlAdapter, self).close()
|
||||
super().close() # type: ignore[no-untyped-call]
|
||||
|
|
|
@ -6,38 +6,46 @@
|
|||
The cache object API for implementing caches. The default is a thread
|
||||
safe in-memory dictionary.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from threading import Lock
|
||||
from typing import IO, TYPE_CHECKING, MutableMapping
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
class BaseCache(object):
|
||||
|
||||
def get(self, key):
|
||||
class BaseCache:
|
||||
def get(self, key: str) -> bytes | None:
|
||||
raise NotImplementedError()
|
||||
|
||||
def set(self, key, value, expires=None):
|
||||
def set(
|
||||
self, key: str, value: bytes, expires: int | datetime | None = None
|
||||
) -> None:
|
||||
raise NotImplementedError()
|
||||
|
||||
def delete(self, key):
|
||||
def delete(self, key: str) -> None:
|
||||
raise NotImplementedError()
|
||||
|
||||
def close(self):
|
||||
def close(self) -> None:
|
||||
pass
|
||||
|
||||
|
||||
class DictCache(BaseCache):
|
||||
|
||||
def __init__(self, init_dict=None):
|
||||
def __init__(self, init_dict: MutableMapping[str, bytes] | None = None) -> None:
|
||||
self.lock = Lock()
|
||||
self.data = init_dict or {}
|
||||
|
||||
def get(self, key):
|
||||
def get(self, key: str) -> bytes | None:
|
||||
return self.data.get(key, None)
|
||||
|
||||
def set(self, key, value, expires=None):
|
||||
def set(
|
||||
self, key: str, value: bytes, expires: int | datetime | None = None
|
||||
) -> None:
|
||||
with self.lock:
|
||||
self.data.update({key: value})
|
||||
|
||||
def delete(self, key):
|
||||
def delete(self, key: str) -> None:
|
||||
with self.lock:
|
||||
if key in self.data:
|
||||
self.data.pop(key)
|
||||
|
@ -55,10 +63,11 @@ class SeparateBodyBaseCache(BaseCache):
|
|||
|
||||
Similarly, the body should be loaded separately via ``get_body()``.
|
||||
"""
|
||||
def set_body(self, key, body):
|
||||
|
||||
def set_body(self, key: str, body: bytes) -> None:
|
||||
raise NotImplementedError()
|
||||
|
||||
def get_body(self, key):
|
||||
def get_body(self, key: str) -> IO[bytes] | None:
|
||||
"""
|
||||
Return the body as file-like object.
|
||||
"""
|
||||
|
|
|
@ -2,8 +2,7 @@
|
|||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
from .file_cache import FileCache, SeparateBodyFileCache
|
||||
from .redis_cache import RedisCache
|
||||
|
||||
from pip._vendor.cachecontrol.caches.file_cache import FileCache, SeparateBodyFileCache
|
||||
from pip._vendor.cachecontrol.caches.redis_cache import RedisCache
|
||||
|
||||
__all__ = ["FileCache", "SeparateBodyFileCache", "RedisCache"]
|
||||
|
|
|
@ -1,22 +1,23 @@
|
|||
# SPDX-FileCopyrightText: 2015 Eric Larson
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
from __future__ import annotations
|
||||
|
||||
import hashlib
|
||||
import os
|
||||
from textwrap import dedent
|
||||
from typing import IO, TYPE_CHECKING
|
||||
|
||||
from ..cache import BaseCache, SeparateBodyBaseCache
|
||||
from ..controller import CacheController
|
||||
from pip._vendor.cachecontrol.cache import BaseCache, SeparateBodyBaseCache
|
||||
from pip._vendor.cachecontrol.controller import CacheController
|
||||
|
||||
try:
|
||||
FileNotFoundError
|
||||
except NameError:
|
||||
# py2.X
|
||||
FileNotFoundError = (IOError, OSError)
|
||||
if TYPE_CHECKING:
|
||||
from datetime import datetime
|
||||
|
||||
from filelock import BaseFileLock
|
||||
|
||||
|
||||
def _secure_open_write(filename, fmode):
|
||||
def _secure_open_write(filename: str, fmode: int) -> IO[bytes]:
|
||||
# We only want to write to this file, so open it in write only mode
|
||||
flags = os.O_WRONLY
|
||||
|
||||
|
@ -39,7 +40,7 @@ def _secure_open_write(filename, fmode):
|
|||
# there
|
||||
try:
|
||||
os.remove(filename)
|
||||
except (IOError, OSError):
|
||||
except OSError:
|
||||
# The file must not exist already, so we can just skip ahead to opening
|
||||
pass
|
||||
|
||||
|
@ -62,37 +63,27 @@ class _FileCacheMixin:
|
|||
|
||||
def __init__(
|
||||
self,
|
||||
directory,
|
||||
forever=False,
|
||||
filemode=0o0600,
|
||||
dirmode=0o0700,
|
||||
use_dir_lock=None,
|
||||
lock_class=None,
|
||||
):
|
||||
|
||||
if use_dir_lock is not None and lock_class is not None:
|
||||
raise ValueError("Cannot use use_dir_lock and lock_class together")
|
||||
|
||||
directory: str,
|
||||
forever: bool = False,
|
||||
filemode: int = 0o0600,
|
||||
dirmode: int = 0o0700,
|
||||
lock_class: type[BaseFileLock] | None = None,
|
||||
) -> None:
|
||||
try:
|
||||
from lockfile import LockFile
|
||||
from lockfile.mkdirlockfile import MkdirLockFile
|
||||
if lock_class is None:
|
||||
from filelock import FileLock
|
||||
|
||||
lock_class = FileLock
|
||||
except ImportError:
|
||||
notice = dedent(
|
||||
"""
|
||||
NOTE: In order to use the FileCache you must have
|
||||
lockfile installed. You can install it via pip:
|
||||
pip install lockfile
|
||||
filelock installed. You can install it via pip:
|
||||
pip install filelock
|
||||
"""
|
||||
)
|
||||
raise ImportError(notice)
|
||||
|
||||
else:
|
||||
if use_dir_lock:
|
||||
lock_class = MkdirLockFile
|
||||
|
||||
elif lock_class is None:
|
||||
lock_class = LockFile
|
||||
|
||||
self.directory = directory
|
||||
self.forever = forever
|
||||
self.filemode = filemode
|
||||
|
@ -100,17 +91,17 @@ class _FileCacheMixin:
|
|||
self.lock_class = lock_class
|
||||
|
||||
@staticmethod
|
||||
def encode(x):
|
||||
def encode(x: str) -> str:
|
||||
return hashlib.sha224(x.encode()).hexdigest()
|
||||
|
||||
def _fn(self, name):
|
||||
def _fn(self, name: str) -> str:
|
||||
# NOTE: This method should not change as some may depend on it.
|
||||
# See: https://github.com/ionrock/cachecontrol/issues/63
|
||||
hashed = self.encode(name)
|
||||
parts = list(hashed[:5]) + [hashed]
|
||||
return os.path.join(self.directory, *parts)
|
||||
|
||||
def get(self, key):
|
||||
def get(self, key: str) -> bytes | None:
|
||||
name = self._fn(key)
|
||||
try:
|
||||
with open(name, "rb") as fh:
|
||||
|
@ -119,26 +110,28 @@ class _FileCacheMixin:
|
|||
except FileNotFoundError:
|
||||
return None
|
||||
|
||||
def set(self, key, value, expires=None):
|
||||
def set(
|
||||
self, key: str, value: bytes, expires: int | datetime | None = None
|
||||
) -> None:
|
||||
name = self._fn(key)
|
||||
self._write(name, value)
|
||||
|
||||
def _write(self, path, data: bytes):
|
||||
def _write(self, path: str, data: bytes) -> None:
|
||||
"""
|
||||
Safely write the data to the given path.
|
||||
"""
|
||||
# Make sure the directory exists
|
||||
try:
|
||||
os.makedirs(os.path.dirname(path), self.dirmode)
|
||||
except (IOError, OSError):
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
with self.lock_class(path) as lock:
|
||||
with self.lock_class(path + ".lock"):
|
||||
# Write our actual file
|
||||
with _secure_open_write(lock.path, self.filemode) as fh:
|
||||
with _secure_open_write(path, self.filemode) as fh:
|
||||
fh.write(data)
|
||||
|
||||
def _delete(self, key, suffix):
|
||||
def _delete(self, key: str, suffix: str) -> None:
|
||||
name = self._fn(key) + suffix
|
||||
if not self.forever:
|
||||
try:
|
||||
|
@ -153,7 +146,7 @@ class FileCache(_FileCacheMixin, BaseCache):
|
|||
downloads.
|
||||
"""
|
||||
|
||||
def delete(self, key):
|
||||
def delete(self, key: str) -> None:
|
||||
self._delete(key, "")
|
||||
|
||||
|
||||
|
@ -163,23 +156,23 @@ class SeparateBodyFileCache(_FileCacheMixin, SeparateBodyBaseCache):
|
|||
peak memory usage.
|
||||
"""
|
||||
|
||||
def get_body(self, key):
|
||||
def get_body(self, key: str) -> IO[bytes] | None:
|
||||
name = self._fn(key) + ".body"
|
||||
try:
|
||||
return open(name, "rb")
|
||||
except FileNotFoundError:
|
||||
return None
|
||||
|
||||
def set_body(self, key, body):
|
||||
def set_body(self, key: str, body: bytes) -> None:
|
||||
name = self._fn(key) + ".body"
|
||||
self._write(name, body)
|
||||
|
||||
def delete(self, key):
|
||||
def delete(self, key: str) -> None:
|
||||
self._delete(key, "")
|
||||
self._delete(key, ".body")
|
||||
|
||||
|
||||
def url_to_file_path(url, filecache):
|
||||
def url_to_file_path(url: str, filecache: FileCache) -> str:
|
||||
"""Return the file cache path based on the URL.
|
||||
|
||||
This does not ensure the file exists!
|
||||
|
|
|
@ -1,39 +1,48 @@
|
|||
# SPDX-FileCopyrightText: 2015 Eric Larson
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
from __future__ import annotations
|
||||
|
||||
from __future__ import division
|
||||
|
||||
from datetime import datetime
|
||||
from datetime import datetime, timezone
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from pip._vendor.cachecontrol.cache import BaseCache
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from redis import Redis
|
||||
|
||||
|
||||
class RedisCache(BaseCache):
|
||||
|
||||
def __init__(self, conn):
|
||||
def __init__(self, conn: Redis[bytes]) -> None:
|
||||
self.conn = conn
|
||||
|
||||
def get(self, key):
|
||||
def get(self, key: str) -> bytes | None:
|
||||
return self.conn.get(key)
|
||||
|
||||
def set(self, key, value, expires=None):
|
||||
def set(
|
||||
self, key: str, value: bytes, expires: int | datetime | None = None
|
||||
) -> None:
|
||||
if not expires:
|
||||
self.conn.set(key, value)
|
||||
elif isinstance(expires, datetime):
|
||||
expires = expires - datetime.utcnow()
|
||||
self.conn.setex(key, int(expires.total_seconds()), value)
|
||||
now_utc = datetime.now(timezone.utc)
|
||||
if expires.tzinfo is None:
|
||||
now_utc = now_utc.replace(tzinfo=None)
|
||||
delta = expires - now_utc
|
||||
self.conn.setex(key, int(delta.total_seconds()), value)
|
||||
else:
|
||||
self.conn.setex(key, expires, value)
|
||||
|
||||
def delete(self, key):
|
||||
def delete(self, key: str) -> None:
|
||||
self.conn.delete(key)
|
||||
|
||||
def clear(self):
|
||||
def clear(self) -> None:
|
||||
"""Helper for clearing all the keys in a database. Use with
|
||||
caution!"""
|
||||
for key in self.conn.keys():
|
||||
self.conn.delete(key)
|
||||
|
||||
def close(self):
|
||||
def close(self) -> None:
|
||||
"""Redis uses connection pooling, no need to close the connection."""
|
||||
pass
|
||||
|
|
|
@ -1,32 +0,0 @@
|
|||
# SPDX-FileCopyrightText: 2015 Eric Larson
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
try:
|
||||
from urllib.parse import urljoin
|
||||
except ImportError:
|
||||
from urlparse import urljoin
|
||||
|
||||
|
||||
try:
|
||||
import cPickle as pickle
|
||||
except ImportError:
|
||||
import pickle
|
||||
|
||||
# Handle the case where the requests module has been patched to not have
|
||||
# urllib3 bundled as part of its source.
|
||||
try:
|
||||
from pip._vendor.requests.packages.urllib3.response import HTTPResponse
|
||||
except ImportError:
|
||||
from pip._vendor.urllib3.response import HTTPResponse
|
||||
|
||||
try:
|
||||
from pip._vendor.requests.packages.urllib3.util import is_fp_closed
|
||||
except ImportError:
|
||||
from pip._vendor.urllib3.util import is_fp_closed
|
||||
|
||||
# Replicate some six behaviour
|
||||
try:
|
||||
text_type = unicode
|
||||
except NameError:
|
||||
text_type = str
|
|
@ -5,17 +5,27 @@
|
|||
"""
|
||||
The httplib2 algorithms ported for use with requests.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import calendar
|
||||
import logging
|
||||
import re
|
||||
import calendar
|
||||
import time
|
||||
from email.utils import parsedate_tz
|
||||
from typing import TYPE_CHECKING, Collection, Mapping
|
||||
|
||||
from pip._vendor.requests.structures import CaseInsensitiveDict
|
||||
|
||||
from .cache import DictCache, SeparateBodyBaseCache
|
||||
from .serialize import Serializer
|
||||
from pip._vendor.cachecontrol.cache import DictCache, SeparateBodyBaseCache
|
||||
from pip._vendor.cachecontrol.serialize import Serializer
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Literal
|
||||
|
||||
from pip._vendor.requests import PreparedRequest
|
||||
from pip._vendor.urllib3 import HTTPResponse
|
||||
|
||||
from pip._vendor.cachecontrol.cache import BaseCache
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
@ -24,20 +34,26 @@ URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")
|
|||
PERMANENT_REDIRECT_STATUSES = (301, 308)
|
||||
|
||||
|
||||
def parse_uri(uri):
|
||||
def parse_uri(uri: str) -> tuple[str, str, str, str, str]:
|
||||
"""Parses a URI using the regex given in Appendix B of RFC 3986.
|
||||
|
||||
(scheme, authority, path, query, fragment) = parse_uri(uri)
|
||||
"""
|
||||
groups = URI.match(uri).groups()
|
||||
match = URI.match(uri)
|
||||
assert match is not None
|
||||
groups = match.groups()
|
||||
return (groups[1], groups[3], groups[4], groups[6], groups[8])
|
||||
|
||||
|
||||
class CacheController(object):
|
||||
class CacheController:
|
||||
"""An interface to see if request should cached or not."""
|
||||
|
||||
def __init__(
|
||||
self, cache=None, cache_etags=True, serializer=None, status_codes=None
|
||||
self,
|
||||
cache: BaseCache | None = None,
|
||||
cache_etags: bool = True,
|
||||
serializer: Serializer | None = None,
|
||||
status_codes: Collection[int] | None = None,
|
||||
):
|
||||
self.cache = DictCache() if cache is None else cache
|
||||
self.cache_etags = cache_etags
|
||||
|
@ -45,7 +61,7 @@ class CacheController(object):
|
|||
self.cacheable_status_codes = status_codes or (200, 203, 300, 301, 308)
|
||||
|
||||
@classmethod
|
||||
def _urlnorm(cls, uri):
|
||||
def _urlnorm(cls, uri: str) -> str:
|
||||
"""Normalize the URL to create a safe key for the cache"""
|
||||
(scheme, authority, path, query, fragment) = parse_uri(uri)
|
||||
if not scheme or not authority:
|
||||
|
@ -65,10 +81,10 @@ class CacheController(object):
|
|||
return defrag_uri
|
||||
|
||||
@classmethod
|
||||
def cache_url(cls, uri):
|
||||
def cache_url(cls, uri: str) -> str:
|
||||
return cls._urlnorm(uri)
|
||||
|
||||
def parse_cache_control(self, headers):
|
||||
def parse_cache_control(self, headers: Mapping[str, str]) -> dict[str, int | None]:
|
||||
known_directives = {
|
||||
# https://tools.ietf.org/html/rfc7234#section-5.2
|
||||
"max-age": (int, True),
|
||||
|
@ -87,7 +103,7 @@ class CacheController(object):
|
|||
|
||||
cc_headers = headers.get("cache-control", headers.get("Cache-Control", ""))
|
||||
|
||||
retval = {}
|
||||
retval: dict[str, int | None] = {}
|
||||
|
||||
for cc_directive in cc_headers.split(","):
|
||||
if not cc_directive.strip():
|
||||
|
@ -122,11 +138,33 @@ class CacheController(object):
|
|||
|
||||
return retval
|
||||
|
||||
def cached_request(self, request):
|
||||
def _load_from_cache(self, request: PreparedRequest) -> HTTPResponse | None:
|
||||
"""
|
||||
Load a cached response, or return None if it's not available.
|
||||
"""
|
||||
cache_url = request.url
|
||||
assert cache_url is not None
|
||||
cache_data = self.cache.get(cache_url)
|
||||
if cache_data is None:
|
||||
logger.debug("No cache entry available")
|
||||
return None
|
||||
|
||||
if isinstance(self.cache, SeparateBodyBaseCache):
|
||||
body_file = self.cache.get_body(cache_url)
|
||||
else:
|
||||
body_file = None
|
||||
|
||||
result = self.serializer.loads(request, cache_data, body_file)
|
||||
if result is None:
|
||||
logger.warning("Cache entry deserialization failed, entry ignored")
|
||||
return result
|
||||
|
||||
def cached_request(self, request: PreparedRequest) -> HTTPResponse | Literal[False]:
|
||||
"""
|
||||
Return a cached response if it exists in the cache, otherwise
|
||||
return False.
|
||||
"""
|
||||
assert request.url is not None
|
||||
cache_url = self.cache_url(request.url)
|
||||
logger.debug('Looking up "%s" in the cache', cache_url)
|
||||
cc = self.parse_cache_control(request.headers)
|
||||
|
@ -140,21 +178,9 @@ class CacheController(object):
|
|||
logger.debug('Request header has "max_age" as 0, cache bypassed')
|
||||
return False
|
||||
|
||||
# Request allows serving from the cache, let's see if we find something
|
||||
cache_data = self.cache.get(cache_url)
|
||||
if cache_data is None:
|
||||
logger.debug("No cache entry available")
|
||||
return False
|
||||
|
||||
if isinstance(self.cache, SeparateBodyBaseCache):
|
||||
body_file = self.cache.get_body(cache_url)
|
||||
else:
|
||||
body_file = None
|
||||
|
||||
# Check whether it can be deserialized
|
||||
resp = self.serializer.loads(request, cache_data, body_file)
|
||||
# Check whether we can load the response from the cache:
|
||||
resp = self._load_from_cache(request)
|
||||
if not resp:
|
||||
logger.warning("Cache entry deserialization failed, entry ignored")
|
||||
return False
|
||||
|
||||
# If we have a cached permanent redirect, return it immediately. We
|
||||
|
@ -174,7 +200,7 @@ class CacheController(object):
|
|||
logger.debug(msg)
|
||||
return resp
|
||||
|
||||
headers = CaseInsensitiveDict(resp.headers)
|
||||
headers: CaseInsensitiveDict[str] = CaseInsensitiveDict(resp.headers)
|
||||
if not headers or "date" not in headers:
|
||||
if "etag" not in headers:
|
||||
# Without date or etag, the cached response can never be used
|
||||
|
@ -185,7 +211,9 @@ class CacheController(object):
|
|||
return False
|
||||
|
||||
now = time.time()
|
||||
date = calendar.timegm(parsedate_tz(headers["date"]))
|
||||
time_tuple = parsedate_tz(headers["date"])
|
||||
assert time_tuple is not None
|
||||
date = calendar.timegm(time_tuple[:6])
|
||||
current_age = max(0, now - date)
|
||||
logger.debug("Current age based on date: %i", current_age)
|
||||
|
||||
|
@ -199,28 +227,30 @@ class CacheController(object):
|
|||
freshness_lifetime = 0
|
||||
|
||||
# Check the max-age pragma in the cache control header
|
||||
if "max-age" in resp_cc:
|
||||
freshness_lifetime = resp_cc["max-age"]
|
||||
max_age = resp_cc.get("max-age")
|
||||
if max_age is not None:
|
||||
freshness_lifetime = max_age
|
||||
logger.debug("Freshness lifetime from max-age: %i", freshness_lifetime)
|
||||
|
||||
# If there isn't a max-age, check for an expires header
|
||||
elif "expires" in headers:
|
||||
expires = parsedate_tz(headers["expires"])
|
||||
if expires is not None:
|
||||
expire_time = calendar.timegm(expires) - date
|
||||
expire_time = calendar.timegm(expires[:6]) - date
|
||||
freshness_lifetime = max(0, expire_time)
|
||||
logger.debug("Freshness lifetime from expires: %i", freshness_lifetime)
|
||||
|
||||
# Determine if we are setting freshness limit in the
|
||||
# request. Note, this overrides what was in the response.
|
||||
if "max-age" in cc:
|
||||
freshness_lifetime = cc["max-age"]
|
||||
max_age = cc.get("max-age")
|
||||
if max_age is not None:
|
||||
freshness_lifetime = max_age
|
||||
logger.debug(
|
||||
"Freshness lifetime from request max-age: %i", freshness_lifetime
|
||||
)
|
||||
|
||||
if "min-fresh" in cc:
|
||||
min_fresh = cc["min-fresh"]
|
||||
min_fresh = cc.get("min-fresh")
|
||||
if min_fresh is not None:
|
||||
# adjust our current age by our min fresh
|
||||
current_age += min_fresh
|
||||
logger.debug("Adjusted current age from min-fresh: %i", current_age)
|
||||
|
@ -239,13 +269,12 @@ class CacheController(object):
|
|||
# return the original handler
|
||||
return False
|
||||
|
||||
def conditional_headers(self, request):
|
||||
cache_url = self.cache_url(request.url)
|
||||
resp = self.serializer.loads(request, self.cache.get(cache_url))
|
||||
def conditional_headers(self, request: PreparedRequest) -> dict[str, str]:
|
||||
resp = self._load_from_cache(request)
|
||||
new_headers = {}
|
||||
|
||||
if resp:
|
||||
headers = CaseInsensitiveDict(resp.headers)
|
||||
headers: CaseInsensitiveDict[str] = CaseInsensitiveDict(resp.headers)
|
||||
|
||||
if "etag" in headers:
|
||||
new_headers["If-None-Match"] = headers["ETag"]
|
||||
|
@ -255,7 +284,14 @@ class CacheController(object):
|
|||
|
||||
return new_headers
|
||||
|
||||
def _cache_set(self, cache_url, request, response, body=None, expires_time=None):
|
||||
def _cache_set(
|
||||
self,
|
||||
cache_url: str,
|
||||
request: PreparedRequest,
|
||||
response: HTTPResponse,
|
||||
body: bytes | None = None,
|
||||
expires_time: int | None = None,
|
||||
) -> None:
|
||||
"""
|
||||
Store the data in the cache.
|
||||
"""
|
||||
|
@ -267,6 +303,9 @@ class CacheController(object):
|
|||
self.serializer.dumps(request, response, b""),
|
||||
expires=expires_time,
|
||||
)
|
||||
# body is None can happen when, for example, we're only updating
|
||||
# headers, as is the case in update_cached_response().
|
||||
if body is not None:
|
||||
self.cache.set_body(cache_url, body)
|
||||
else:
|
||||
self.cache.set(
|
||||
|
@ -275,7 +314,13 @@ class CacheController(object):
|
|||
expires=expires_time,
|
||||
)
|
||||
|
||||
def cache_response(self, request, response, body=None, status_codes=None):
|
||||
def cache_response(
|
||||
self,
|
||||
request: PreparedRequest,
|
||||
response: HTTPResponse,
|
||||
body: bytes | None = None,
|
||||
status_codes: Collection[int] | None = None,
|
||||
) -> None:
|
||||
"""
|
||||
Algorithm for caching requests.
|
||||
|
||||
|
@ -290,10 +335,14 @@ class CacheController(object):
|
|||
)
|
||||
return
|
||||
|
||||
response_headers = CaseInsensitiveDict(response.headers)
|
||||
response_headers: CaseInsensitiveDict[str] = CaseInsensitiveDict(
|
||||
response.headers
|
||||
)
|
||||
|
||||
if "date" in response_headers:
|
||||
date = calendar.timegm(parsedate_tz(response_headers["date"]))
|
||||
time_tuple = parsedate_tz(response_headers["date"])
|
||||
assert time_tuple is not None
|
||||
date = calendar.timegm(time_tuple[:6])
|
||||
else:
|
||||
date = 0
|
||||
|
||||
|
@ -312,6 +361,7 @@ class CacheController(object):
|
|||
cc_req = self.parse_cache_control(request.headers)
|
||||
cc = self.parse_cache_control(response_headers)
|
||||
|
||||
assert request.url is not None
|
||||
cache_url = self.cache_url(request.url)
|
||||
logger.debug('Updating cache with response from "%s"', cache_url)
|
||||
|
||||
|
@ -344,11 +394,11 @@ class CacheController(object):
|
|||
if response_headers.get("expires"):
|
||||
expires = parsedate_tz(response_headers["expires"])
|
||||
if expires is not None:
|
||||
expires_time = calendar.timegm(expires) - date
|
||||
expires_time = calendar.timegm(expires[:6]) - date
|
||||
|
||||
expires_time = max(expires_time, 14 * 86400)
|
||||
|
||||
logger.debug("etag object cached for {0} seconds".format(expires_time))
|
||||
logger.debug(f"etag object cached for {expires_time} seconds")
|
||||
logger.debug("Caching due to etag")
|
||||
self._cache_set(cache_url, request, response, body, expires_time)
|
||||
|
||||
|
@ -362,11 +412,14 @@ class CacheController(object):
|
|||
# is no date header then we can't do anything about expiring
|
||||
# the cache.
|
||||
elif "date" in response_headers:
|
||||
date = calendar.timegm(parsedate_tz(response_headers["date"]))
|
||||
time_tuple = parsedate_tz(response_headers["date"])
|
||||
assert time_tuple is not None
|
||||
date = calendar.timegm(time_tuple[:6])
|
||||
# cache when there is a max-age > 0
|
||||
if "max-age" in cc and cc["max-age"] > 0:
|
||||
max_age = cc.get("max-age")
|
||||
if max_age is not None and max_age > 0:
|
||||
logger.debug("Caching b/c date exists and max-age > 0")
|
||||
expires_time = cc["max-age"]
|
||||
expires_time = max_age
|
||||
self._cache_set(
|
||||
cache_url,
|
||||
request,
|
||||
|
@ -381,12 +434,12 @@ class CacheController(object):
|
|||
if response_headers["expires"]:
|
||||
expires = parsedate_tz(response_headers["expires"])
|
||||
if expires is not None:
|
||||
expires_time = calendar.timegm(expires) - date
|
||||
expires_time = calendar.timegm(expires[:6]) - date
|
||||
else:
|
||||
expires_time = None
|
||||
|
||||
logger.debug(
|
||||
"Caching b/c of expires header. expires in {0} seconds".format(
|
||||
"Caching b/c of expires header. expires in {} seconds".format(
|
||||
expires_time
|
||||
)
|
||||
)
|
||||
|
@ -398,16 +451,18 @@ class CacheController(object):
|
|||
expires_time,
|
||||
)
|
||||
|
||||
def update_cached_response(self, request, response):
|
||||
def update_cached_response(
|
||||
self, request: PreparedRequest, response: HTTPResponse
|
||||
) -> HTTPResponse:
|
||||
"""On a 304 we will get a new set of headers that we want to
|
||||
update our cached value with, assuming we have one.
|
||||
|
||||
This should only ever be called when we've sent an ETag and
|
||||
gotten a 304 as the response.
|
||||
"""
|
||||
assert request.url is not None
|
||||
cache_url = self.cache_url(request.url)
|
||||
|
||||
cached_response = self.serializer.loads(request, self.cache.get(cache_url))
|
||||
cached_response = self._load_from_cache(request)
|
||||
|
||||
if not cached_response:
|
||||
# we didn't have a cached response
|
||||
|
@ -423,11 +478,11 @@ class CacheController(object):
|
|||
excluded_headers = ["content-length"]
|
||||
|
||||
cached_response.headers.update(
|
||||
dict(
|
||||
(k, v)
|
||||
for k, v in response.headers.items()
|
||||
{
|
||||
k: v
|
||||
for k, v in response.headers.items() # type: ignore[no-untyped-call]
|
||||
if k.lower() not in excluded_headers
|
||||
)
|
||||
}
|
||||
)
|
||||
|
||||
# we want a 200 b/c we have content via the cache
|
||||
|
|
|
@ -1,12 +1,17 @@
|
|||
# SPDX-FileCopyrightText: 2015 Eric Larson
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
from __future__ import annotations
|
||||
|
||||
from tempfile import NamedTemporaryFile
|
||||
import mmap
|
||||
from tempfile import NamedTemporaryFile
|
||||
from typing import TYPE_CHECKING, Any, Callable
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from http.client import HTTPResponse
|
||||
|
||||
|
||||
class CallbackFileWrapper(object):
|
||||
class CallbackFileWrapper:
|
||||
"""
|
||||
Small wrapper around a fp object which will tee everything read into a
|
||||
buffer, and when that file is closed it will execute a callback with the
|
||||
|
@ -25,12 +30,14 @@ class CallbackFileWrapper(object):
|
|||
performance impact.
|
||||
"""
|
||||
|
||||
def __init__(self, fp, callback):
|
||||
def __init__(
|
||||
self, fp: HTTPResponse, callback: Callable[[bytes], None] | None
|
||||
) -> None:
|
||||
self.__buf = NamedTemporaryFile("rb+", delete=True)
|
||||
self.__fp = fp
|
||||
self.__callback = callback
|
||||
|
||||
def __getattr__(self, name):
|
||||
def __getattr__(self, name: str) -> Any:
|
||||
# The vaguaries of garbage collection means that self.__fp is
|
||||
# not always set. By using __getattribute__ and the private
|
||||
# name[0] allows looking up the attribute value and raising an
|
||||
|
@ -42,7 +49,7 @@ class CallbackFileWrapper(object):
|
|||
fp = self.__getattribute__("_CallbackFileWrapper__fp")
|
||||
return getattr(fp, name)
|
||||
|
||||
def __is_fp_closed(self):
|
||||
def __is_fp_closed(self) -> bool:
|
||||
try:
|
||||
return self.__fp.fp is None
|
||||
|
||||
|
@ -50,7 +57,8 @@ class CallbackFileWrapper(object):
|
|||
pass
|
||||
|
||||
try:
|
||||
return self.__fp.closed
|
||||
closed: bool = self.__fp.closed
|
||||
return closed
|
||||
|
||||
except AttributeError:
|
||||
pass
|
||||
|
@ -59,7 +67,7 @@ class CallbackFileWrapper(object):
|
|||
# TODO: Add some logging here...
|
||||
return False
|
||||
|
||||
def _close(self):
|
||||
def _close(self) -> None:
|
||||
if self.__callback:
|
||||
if self.__buf.tell() == 0:
|
||||
# Empty file:
|
||||
|
@ -86,8 +94,8 @@ class CallbackFileWrapper(object):
|
|||
# Important when caching big files.
|
||||
self.__buf.close()
|
||||
|
||||
def read(self, amt=None):
|
||||
data = self.__fp.read(amt)
|
||||
def read(self, amt: int | None = None) -> bytes:
|
||||
data: bytes = self.__fp.read(amt)
|
||||
if data:
|
||||
# We may be dealing with b'', a sign that things are over:
|
||||
# it's passed e.g. after we've already closed self.__buf.
|
||||
|
@ -97,8 +105,8 @@ class CallbackFileWrapper(object):
|
|||
|
||||
return data
|
||||
|
||||
def _safe_read(self, amt):
|
||||
data = self.__fp._safe_read(amt)
|
||||
def _safe_read(self, amt: int) -> bytes:
|
||||
data: bytes = self.__fp._safe_read(amt) # type: ignore[attr-defined]
|
||||
if amt == 2 and data == b"\r\n":
|
||||
# urllib executes this read to toss the CRLF at the end
|
||||
# of the chunk.
|
||||
|
|
|
@ -1,29 +1,31 @@
|
|||
# SPDX-FileCopyrightText: 2015 Eric Larson
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
from __future__ import annotations
|
||||
|
||||
import calendar
|
||||
import time
|
||||
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from email.utils import formatdate, parsedate, parsedate_tz
|
||||
from typing import TYPE_CHECKING, Any, Mapping
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
if TYPE_CHECKING:
|
||||
from pip._vendor.urllib3 import HTTPResponse
|
||||
|
||||
TIME_FMT = "%a, %d %b %Y %H:%M:%S GMT"
|
||||
|
||||
|
||||
def expire_after(delta, date=None):
|
||||
date = date or datetime.utcnow()
|
||||
def expire_after(delta: timedelta, date: datetime | None = None) -> datetime:
|
||||
date = date or datetime.now(timezone.utc)
|
||||
return date + delta
|
||||
|
||||
|
||||
def datetime_to_header(dt):
|
||||
def datetime_to_header(dt: datetime) -> str:
|
||||
return formatdate(calendar.timegm(dt.timetuple()))
|
||||
|
||||
|
||||
class BaseHeuristic(object):
|
||||
|
||||
def warning(self, response):
|
||||
class BaseHeuristic:
|
||||
def warning(self, response: HTTPResponse) -> str | None:
|
||||
"""
|
||||
Return a valid 1xx warning header value describing the cache
|
||||
adjustments.
|
||||
|
@ -34,7 +36,7 @@ class BaseHeuristic(object):
|
|||
"""
|
||||
return '110 - "Response is Stale"'
|
||||
|
||||
def update_headers(self, response):
|
||||
def update_headers(self, response: HTTPResponse) -> dict[str, str]:
|
||||
"""Update the response headers with any new headers.
|
||||
|
||||
NOTE: This SHOULD always include some Warning header to
|
||||
|
@ -43,7 +45,7 @@ class BaseHeuristic(object):
|
|||
"""
|
||||
return {}
|
||||
|
||||
def apply(self, response):
|
||||
def apply(self, response: HTTPResponse) -> HTTPResponse:
|
||||
updated_headers = self.update_headers(response)
|
||||
|
||||
if updated_headers:
|
||||
|
@ -61,12 +63,12 @@ class OneDayCache(BaseHeuristic):
|
|||
future.
|
||||
"""
|
||||
|
||||
def update_headers(self, response):
|
||||
def update_headers(self, response: HTTPResponse) -> dict[str, str]:
|
||||
headers = {}
|
||||
|
||||
if "expires" not in response.headers:
|
||||
date = parsedate(response.headers["date"])
|
||||
expires = expire_after(timedelta(days=1), date=datetime(*date[:6]))
|
||||
expires = expire_after(timedelta(days=1), date=datetime(*date[:6], tzinfo=timezone.utc)) # type: ignore[misc]
|
||||
headers["expires"] = datetime_to_header(expires)
|
||||
headers["cache-control"] = "public"
|
||||
return headers
|
||||
|
@ -77,14 +79,14 @@ class ExpiresAfter(BaseHeuristic):
|
|||
Cache **all** requests for a defined time period.
|
||||
"""
|
||||
|
||||
def __init__(self, **kw):
|
||||
def __init__(self, **kw: Any) -> None:
|
||||
self.delta = timedelta(**kw)
|
||||
|
||||
def update_headers(self, response):
|
||||
def update_headers(self, response: HTTPResponse) -> dict[str, str]:
|
||||
expires = expire_after(self.delta)
|
||||
return {"expires": datetime_to_header(expires), "cache-control": "public"}
|
||||
|
||||
def warning(self, response):
|
||||
def warning(self, response: HTTPResponse) -> str | None:
|
||||
tmpl = "110 - Automatically cached for %s. Response might be stale"
|
||||
return tmpl % self.delta
|
||||
|
||||
|
@ -101,12 +103,23 @@ class LastModified(BaseHeuristic):
|
|||
http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397
|
||||
Unlike mozilla we limit this to 24-hr.
|
||||
"""
|
||||
|
||||
cacheable_by_default_statuses = {
|
||||
200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501
|
||||
200,
|
||||
203,
|
||||
204,
|
||||
206,
|
||||
300,
|
||||
301,
|
||||
404,
|
||||
405,
|
||||
410,
|
||||
414,
|
||||
501,
|
||||
}
|
||||
|
||||
def update_headers(self, resp):
|
||||
headers = resp.headers
|
||||
def update_headers(self, resp: HTTPResponse) -> dict[str, str]:
|
||||
headers: Mapping[str, str] = resp.headers
|
||||
|
||||
if "expires" in headers:
|
||||
return {}
|
||||
|
@ -120,9 +133,11 @@ class LastModified(BaseHeuristic):
|
|||
if "date" not in headers or "last-modified" not in headers:
|
||||
return {}
|
||||
|
||||
date = calendar.timegm(parsedate_tz(headers["date"]))
|
||||
time_tuple = parsedate_tz(headers["date"])
|
||||
assert time_tuple is not None
|
||||
date = calendar.timegm(time_tuple[:6])
|
||||
last_modified = parsedate(headers["last-modified"])
|
||||
if date is None or last_modified is None:
|
||||
if last_modified is None:
|
||||
return {}
|
||||
|
||||
now = time.time()
|
||||
|
@ -135,5 +150,5 @@ class LastModified(BaseHeuristic):
|
|||
expires = date + freshness_lifetime
|
||||
return {"expires": time.strftime(TIME_FMT, time.gmtime(expires))}
|
||||
|
||||
def warning(self, resp):
|
||||
def warning(self, resp: HTTPResponse) -> str | None:
|
||||
return None
|
||||
|
|
|
@ -1,78 +1,76 @@
|
|||
# SPDX-FileCopyrightText: 2015 Eric Larson
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
from __future__ import annotations
|
||||
|
||||
import base64
|
||||
import io
|
||||
import json
|
||||
import zlib
|
||||
from typing import IO, TYPE_CHECKING, Any, Mapping, cast
|
||||
|
||||
from pip._vendor import msgpack
|
||||
from pip._vendor.requests.structures import CaseInsensitiveDict
|
||||
from pip._vendor.urllib3 import HTTPResponse
|
||||
|
||||
from .compat import HTTPResponse, pickle, text_type
|
||||
if TYPE_CHECKING:
|
||||
from pip._vendor.requests import PreparedRequest
|
||||
|
||||
|
||||
def _b64_decode_bytes(b):
|
||||
return base64.b64decode(b.encode("ascii"))
|
||||
class Serializer:
|
||||
serde_version = "4"
|
||||
|
||||
|
||||
def _b64_decode_str(s):
|
||||
return _b64_decode_bytes(s).decode("utf8")
|
||||
|
||||
|
||||
_default_body_read = object()
|
||||
|
||||
|
||||
class Serializer(object):
|
||||
def dumps(self, request, response, body=None):
|
||||
response_headers = CaseInsensitiveDict(response.headers)
|
||||
def dumps(
|
||||
self,
|
||||
request: PreparedRequest,
|
||||
response: HTTPResponse,
|
||||
body: bytes | None = None,
|
||||
) -> bytes:
|
||||
response_headers: CaseInsensitiveDict[str] = CaseInsensitiveDict(
|
||||
response.headers
|
||||
)
|
||||
|
||||
if body is None:
|
||||
# When a body isn't passed in, we'll read the response. We
|
||||
# also update the response with a new file handler to be
|
||||
# sure it acts as though it was never read.
|
||||
body = response.read(decode_content=False)
|
||||
response._fp = io.BytesIO(body)
|
||||
response._fp = io.BytesIO(body) # type: ignore[attr-defined]
|
||||
response.length_remaining = len(body)
|
||||
|
||||
# NOTE: This is all a bit weird, but it's really important that on
|
||||
# Python 2.x these objects are unicode and not str, even when
|
||||
# they contain only ascii. The problem here is that msgpack
|
||||
# understands the difference between unicode and bytes and we
|
||||
# have it set to differentiate between them, however Python 2
|
||||
# doesn't know the difference. Forcing these to unicode will be
|
||||
# enough to have msgpack know the difference.
|
||||
data = {
|
||||
u"response": {
|
||||
u"body": body, # Empty bytestring if body is stored separately
|
||||
u"headers": dict(
|
||||
(text_type(k), text_type(v)) for k, v in response.headers.items()
|
||||
),
|
||||
u"status": response.status,
|
||||
u"version": response.version,
|
||||
u"reason": text_type(response.reason),
|
||||
u"strict": response.strict,
|
||||
u"decode_content": response.decode_content,
|
||||
"response": {
|
||||
"body": body, # Empty bytestring if body is stored separately
|
||||
"headers": {str(k): str(v) for k, v in response.headers.items()}, # type: ignore[no-untyped-call]
|
||||
"status": response.status,
|
||||
"version": response.version,
|
||||
"reason": str(response.reason),
|
||||
"decode_content": response.decode_content,
|
||||
}
|
||||
}
|
||||
|
||||
# Construct our vary headers
|
||||
data[u"vary"] = {}
|
||||
if u"vary" in response_headers:
|
||||
varied_headers = response_headers[u"vary"].split(",")
|
||||
data["vary"] = {}
|
||||
if "vary" in response_headers:
|
||||
varied_headers = response_headers["vary"].split(",")
|
||||
for header in varied_headers:
|
||||
header = text_type(header).strip()
|
||||
header = str(header).strip()
|
||||
header_value = request.headers.get(header, None)
|
||||
if header_value is not None:
|
||||
header_value = text_type(header_value)
|
||||
data[u"vary"][header] = header_value
|
||||
header_value = str(header_value)
|
||||
data["vary"][header] = header_value
|
||||
|
||||
return b",".join([b"cc=4", msgpack.dumps(data, use_bin_type=True)])
|
||||
return b",".join([f"cc={self.serde_version}".encode(), self.serialize(data)])
|
||||
|
||||
def loads(self, request, data, body_file=None):
|
||||
def serialize(self, data: dict[str, Any]) -> bytes:
|
||||
return cast(bytes, msgpack.dumps(data, use_bin_type=True))
|
||||
|
||||
def loads(
|
||||
self,
|
||||
request: PreparedRequest,
|
||||
data: bytes,
|
||||
body_file: IO[bytes] | None = None,
|
||||
) -> HTTPResponse | None:
|
||||
# Short circuit if we've been given an empty set of data
|
||||
if not data:
|
||||
return
|
||||
return None
|
||||
|
||||
# Determine what version of the serializer the data was serialized
|
||||
# with
|
||||
|
@ -88,18 +86,23 @@ class Serializer(object):
|
|||
ver = b"cc=0"
|
||||
|
||||
# Get the version number out of the cc=N
|
||||
ver = ver.split(b"=", 1)[-1].decode("ascii")
|
||||
verstr = ver.split(b"=", 1)[-1].decode("ascii")
|
||||
|
||||
# Dispatch to the actual load method for the given version
|
||||
try:
|
||||
return getattr(self, "_loads_v{}".format(ver))(request, data, body_file)
|
||||
return getattr(self, f"_loads_v{verstr}")(request, data, body_file) # type: ignore[no-any-return]
|
||||
|
||||
except AttributeError:
|
||||
# This is a version we don't have a loads function for, so we'll
|
||||
# just treat it as a miss and return None
|
||||
return
|
||||
return None
|
||||
|
||||
def prepare_response(self, request, cached, body_file=None):
|
||||
def prepare_response(
|
||||
self,
|
||||
request: PreparedRequest,
|
||||
cached: Mapping[str, Any],
|
||||
body_file: IO[bytes] | None = None,
|
||||
) -> HTTPResponse | None:
|
||||
"""Verify our vary headers match and construct a real urllib3
|
||||
HTTPResponse object.
|
||||
"""
|
||||
|
@ -108,23 +111,26 @@ class Serializer(object):
|
|||
# This case is also handled in the controller code when creating
|
||||
# a cache entry, but is left here for backwards compatibility.
|
||||
if "*" in cached.get("vary", {}):
|
||||
return
|
||||
return None
|
||||
|
||||
# Ensure that the Vary headers for the cached response match our
|
||||
# request
|
||||
for header, value in cached.get("vary", {}).items():
|
||||
if request.headers.get(header, None) != value:
|
||||
return
|
||||
return None
|
||||
|
||||
body_raw = cached["response"].pop("body")
|
||||
|
||||
headers = CaseInsensitiveDict(data=cached["response"]["headers"])
|
||||
headers: CaseInsensitiveDict[str] = CaseInsensitiveDict(
|
||||
data=cached["response"]["headers"]
|
||||
)
|
||||
if headers.get("transfer-encoding", "") == "chunked":
|
||||
headers.pop("transfer-encoding")
|
||||
|
||||
cached["response"]["headers"] = headers
|
||||
|
||||
try:
|
||||
body: IO[bytes]
|
||||
if body_file is None:
|
||||
body = io.BytesIO(body_raw)
|
||||
else:
|
||||
|
@ -138,53 +144,63 @@ class Serializer(object):
|
|||
# TypeError: 'str' does not support the buffer interface
|
||||
body = io.BytesIO(body_raw.encode("utf8"))
|
||||
|
||||
# Discard any `strict` parameter serialized by older version of cachecontrol.
|
||||
cached["response"].pop("strict", None)
|
||||
|
||||
return HTTPResponse(body=body, preload_content=False, **cached["response"])
|
||||
|
||||
def _loads_v0(self, request, data, body_file=None):
|
||||
def _loads_v0(
|
||||
self,
|
||||
request: PreparedRequest,
|
||||
data: bytes,
|
||||
body_file: IO[bytes] | None = None,
|
||||
) -> None:
|
||||
# The original legacy cache data. This doesn't contain enough
|
||||
# information to construct everything we need, so we'll treat this as
|
||||
# a miss.
|
||||
return
|
||||
return None
|
||||
|
||||
def _loads_v1(self, request, data, body_file=None):
|
||||
try:
|
||||
cached = pickle.loads(data)
|
||||
except ValueError:
|
||||
return
|
||||
def _loads_v1(
|
||||
self,
|
||||
request: PreparedRequest,
|
||||
data: bytes,
|
||||
body_file: IO[bytes] | None = None,
|
||||
) -> HTTPResponse | None:
|
||||
# The "v1" pickled cache format. This is no longer supported
|
||||
# for security reasons, so we treat it as a miss.
|
||||
return None
|
||||
|
||||
return self.prepare_response(request, cached, body_file)
|
||||
def _loads_v2(
|
||||
self,
|
||||
request: PreparedRequest,
|
||||
data: bytes,
|
||||
body_file: IO[bytes] | None = None,
|
||||
) -> HTTPResponse | None:
|
||||
# The "v2" compressed base64 cache format.
|
||||
# This has been removed due to age and poor size/performance
|
||||
# characteristics, so we treat it as a miss.
|
||||
return None
|
||||
|
||||
def _loads_v2(self, request, data, body_file=None):
|
||||
assert body_file is None
|
||||
try:
|
||||
cached = json.loads(zlib.decompress(data).decode("utf8"))
|
||||
except (ValueError, zlib.error):
|
||||
return
|
||||
|
||||
# We need to decode the items that we've base64 encoded
|
||||
cached["response"]["body"] = _b64_decode_bytes(cached["response"]["body"])
|
||||
cached["response"]["headers"] = dict(
|
||||
(_b64_decode_str(k), _b64_decode_str(v))
|
||||
for k, v in cached["response"]["headers"].items()
|
||||
)
|
||||
cached["response"]["reason"] = _b64_decode_str(cached["response"]["reason"])
|
||||
cached["vary"] = dict(
|
||||
(_b64_decode_str(k), _b64_decode_str(v) if v is not None else v)
|
||||
for k, v in cached["vary"].items()
|
||||
)
|
||||
|
||||
return self.prepare_response(request, cached, body_file)
|
||||
|
||||
def _loads_v3(self, request, data, body_file):
|
||||
def _loads_v3(
|
||||
self,
|
||||
request: PreparedRequest,
|
||||
data: bytes,
|
||||
body_file: IO[bytes] | None = None,
|
||||
) -> None:
|
||||
# Due to Python 2 encoding issues, it's impossible to know for sure
|
||||
# exactly how to load v3 entries, thus we'll treat these as a miss so
|
||||
# that they get rewritten out as v4 entries.
|
||||
return
|
||||
return None
|
||||
|
||||
def _loads_v4(self, request, data, body_file=None):
|
||||
def _loads_v4(
|
||||
self,
|
||||
request: PreparedRequest,
|
||||
data: bytes,
|
||||
body_file: IO[bytes] | None = None,
|
||||
) -> HTTPResponse | None:
|
||||
try:
|
||||
cached = msgpack.loads(data, raw=False)
|
||||
except ValueError:
|
||||
return
|
||||
return None
|
||||
|
||||
return self.prepare_response(request, cached, body_file)
|
||||
|
|
|
@ -1,22 +1,32 @@
|
|||
# SPDX-FileCopyrightText: 2015 Eric Larson
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
from __future__ import annotations
|
||||
|
||||
from .adapter import CacheControlAdapter
|
||||
from .cache import DictCache
|
||||
from typing import TYPE_CHECKING, Collection
|
||||
|
||||
from pip._vendor.cachecontrol.adapter import CacheControlAdapter
|
||||
from pip._vendor.cachecontrol.cache import DictCache
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pip._vendor import requests
|
||||
|
||||
from pip._vendor.cachecontrol.cache import BaseCache
|
||||
from pip._vendor.cachecontrol.controller import CacheController
|
||||
from pip._vendor.cachecontrol.heuristics import BaseHeuristic
|
||||
from pip._vendor.cachecontrol.serialize import Serializer
|
||||
|
||||
|
||||
def CacheControl(
|
||||
sess,
|
||||
cache=None,
|
||||
cache_etags=True,
|
||||
serializer=None,
|
||||
heuristic=None,
|
||||
controller_class=None,
|
||||
adapter_class=None,
|
||||
cacheable_methods=None,
|
||||
):
|
||||
|
||||
sess: requests.Session,
|
||||
cache: BaseCache | None = None,
|
||||
cache_etags: bool = True,
|
||||
serializer: Serializer | None = None,
|
||||
heuristic: BaseHeuristic | None = None,
|
||||
controller_class: type[CacheController] | None = None,
|
||||
adapter_class: type[CacheControlAdapter] | None = None,
|
||||
cacheable_methods: Collection[str] | None = None,
|
||||
) -> requests.Session:
|
||||
cache = DictCache() if cache is None else cache
|
||||
adapter_class = adapter_class or CacheControlAdapter
|
||||
adapter = adapter_class(
|
||||
|
|
|
@ -0,0 +1,21 @@
|
|||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2022 Seth Michael Larson
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
|
@ -0,0 +1,13 @@
|
|||
"""Verify certificates using native system trust stores"""
|
||||
|
||||
import sys as _sys
|
||||
|
||||
if _sys.version_info < (3, 10):
|
||||
raise ImportError("truststore requires Python 3.10 or later")
|
||||
|
||||
from ._api import SSLContext, extract_from_ssl, inject_into_ssl # noqa: E402
|
||||
|
||||
del _api, _sys # type: ignore[name-defined] # noqa: F821
|
||||
|
||||
__all__ = ["SSLContext", "inject_into_ssl", "extract_from_ssl"]
|
||||
__version__ = "0.8.0"
|
|
@ -0,0 +1,302 @@
|
|||
import os
|
||||
import platform
|
||||
import socket
|
||||
import ssl
|
||||
import typing
|
||||
|
||||
import _ssl # type: ignore[import]
|
||||
|
||||
from ._ssl_constants import (
|
||||
_original_SSLContext,
|
||||
_original_super_SSLContext,
|
||||
_truststore_SSLContext_dunder_class,
|
||||
_truststore_SSLContext_super_class,
|
||||
)
|
||||
|
||||
if platform.system() == "Windows":
|
||||
from ._windows import _configure_context, _verify_peercerts_impl
|
||||
elif platform.system() == "Darwin":
|
||||
from ._macos import _configure_context, _verify_peercerts_impl
|
||||
else:
|
||||
from ._openssl import _configure_context, _verify_peercerts_impl
|
||||
|
||||
if typing.TYPE_CHECKING:
|
||||
from pip._vendor.typing_extensions import Buffer
|
||||
|
||||
# From typeshed/stdlib/ssl.pyi
|
||||
_StrOrBytesPath: typing.TypeAlias = str | bytes | os.PathLike[str] | os.PathLike[bytes]
|
||||
_PasswordType: typing.TypeAlias = str | bytes | typing.Callable[[], str | bytes]
|
||||
|
||||
|
||||
def inject_into_ssl() -> None:
|
||||
"""Injects the :class:`truststore.SSLContext` into the ``ssl``
|
||||
module by replacing :class:`ssl.SSLContext`.
|
||||
"""
|
||||
setattr(ssl, "SSLContext", SSLContext)
|
||||
# urllib3 holds on to its own reference of ssl.SSLContext
|
||||
# so we need to replace that reference too.
|
||||
try:
|
||||
import pip._vendor.urllib3.util.ssl_ as urllib3_ssl
|
||||
|
||||
setattr(urllib3_ssl, "SSLContext", SSLContext)
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
|
||||
def extract_from_ssl() -> None:
|
||||
"""Restores the :class:`ssl.SSLContext` class to its original state"""
|
||||
setattr(ssl, "SSLContext", _original_SSLContext)
|
||||
try:
|
||||
import pip._vendor.urllib3.util.ssl_ as urllib3_ssl
|
||||
|
||||
urllib3_ssl.SSLContext = _original_SSLContext
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
|
||||
class SSLContext(_truststore_SSLContext_super_class): # type: ignore[misc]
|
||||
"""SSLContext API that uses system certificates on all platforms"""
|
||||
|
||||
@property # type: ignore[misc]
|
||||
def __class__(self) -> type:
|
||||
# Dirty hack to get around isinstance() checks
|
||||
# for ssl.SSLContext instances in aiohttp/trustme
|
||||
# when using non-CPython implementations.
|
||||
return _truststore_SSLContext_dunder_class or SSLContext
|
||||
|
||||
def __init__(self, protocol: int = None) -> None: # type: ignore[assignment]
|
||||
self._ctx = _original_SSLContext(protocol)
|
||||
|
||||
class TruststoreSSLObject(ssl.SSLObject):
|
||||
# This object exists because wrap_bio() doesn't
|
||||
# immediately do the handshake so we need to do
|
||||
# certificate verifications after SSLObject.do_handshake()
|
||||
|
||||
def do_handshake(self) -> None:
|
||||
ret = super().do_handshake()
|
||||
_verify_peercerts(self, server_hostname=self.server_hostname)
|
||||
return ret
|
||||
|
||||
self._ctx.sslobject_class = TruststoreSSLObject
|
||||
|
||||
def wrap_socket(
|
||||
self,
|
||||
sock: socket.socket,
|
||||
server_side: bool = False,
|
||||
do_handshake_on_connect: bool = True,
|
||||
suppress_ragged_eofs: bool = True,
|
||||
server_hostname: str | None = None,
|
||||
session: ssl.SSLSession | None = None,
|
||||
) -> ssl.SSLSocket:
|
||||
# Use a context manager here because the
|
||||
# inner SSLContext holds on to our state
|
||||
# but also does the actual handshake.
|
||||
with _configure_context(self._ctx):
|
||||
ssl_sock = self._ctx.wrap_socket(
|
||||
sock,
|
||||
server_side=server_side,
|
||||
server_hostname=server_hostname,
|
||||
do_handshake_on_connect=do_handshake_on_connect,
|
||||
suppress_ragged_eofs=suppress_ragged_eofs,
|
||||
session=session,
|
||||
)
|
||||
try:
|
||||
_verify_peercerts(ssl_sock, server_hostname=server_hostname)
|
||||
except Exception:
|
||||
ssl_sock.close()
|
||||
raise
|
||||
return ssl_sock
|
||||
|
||||
def wrap_bio(
|
||||
self,
|
||||
incoming: ssl.MemoryBIO,
|
||||
outgoing: ssl.MemoryBIO,
|
||||
server_side: bool = False,
|
||||
server_hostname: str | None = None,
|
||||
session: ssl.SSLSession | None = None,
|
||||
) -> ssl.SSLObject:
|
||||
with _configure_context(self._ctx):
|
||||
ssl_obj = self._ctx.wrap_bio(
|
||||
incoming,
|
||||
outgoing,
|
||||
server_hostname=server_hostname,
|
||||
server_side=server_side,
|
||||
session=session,
|
||||
)
|
||||
return ssl_obj
|
||||
|
||||
def load_verify_locations(
|
||||
self,
|
||||
cafile: str | bytes | os.PathLike[str] | os.PathLike[bytes] | None = None,
|
||||
capath: str | bytes | os.PathLike[str] | os.PathLike[bytes] | None = None,
|
||||
cadata: typing.Union[str, "Buffer", None] = None,
|
||||
) -> None:
|
||||
return self._ctx.load_verify_locations(
|
||||
cafile=cafile, capath=capath, cadata=cadata
|
||||
)
|
||||
|
||||
def load_cert_chain(
|
||||
self,
|
||||
certfile: _StrOrBytesPath,
|
||||
keyfile: _StrOrBytesPath | None = None,
|
||||
password: _PasswordType | None = None,
|
||||
) -> None:
|
||||
return self._ctx.load_cert_chain(
|
||||
certfile=certfile, keyfile=keyfile, password=password
|
||||
)
|
||||
|
||||
def load_default_certs(
|
||||
self, purpose: ssl.Purpose = ssl.Purpose.SERVER_AUTH
|
||||
) -> None:
|
||||
return self._ctx.load_default_certs(purpose)
|
||||
|
||||
def set_alpn_protocols(self, alpn_protocols: typing.Iterable[str]) -> None:
|
||||
return self._ctx.set_alpn_protocols(alpn_protocols)
|
||||
|
||||
def set_npn_protocols(self, npn_protocols: typing.Iterable[str]) -> None:
|
||||
return self._ctx.set_npn_protocols(npn_protocols)
|
||||
|
||||
def set_ciphers(self, __cipherlist: str) -> None:
|
||||
return self._ctx.set_ciphers(__cipherlist)
|
||||
|
||||
def get_ciphers(self) -> typing.Any:
|
||||
return self._ctx.get_ciphers()
|
||||
|
||||
def session_stats(self) -> dict[str, int]:
|
||||
return self._ctx.session_stats()
|
||||
|
||||
def cert_store_stats(self) -> dict[str, int]:
|
||||
raise NotImplementedError()
|
||||
|
||||
@typing.overload
|
||||
def get_ca_certs(
|
||||
self, binary_form: typing.Literal[False] = ...
|
||||
) -> list[typing.Any]:
|
||||
...
|
||||
|
||||
@typing.overload
|
||||
def get_ca_certs(self, binary_form: typing.Literal[True] = ...) -> list[bytes]:
|
||||
...
|
||||
|
||||
@typing.overload
|
||||
def get_ca_certs(self, binary_form: bool = ...) -> typing.Any:
|
||||
...
|
||||
|
||||
def get_ca_certs(self, binary_form: bool = False) -> list[typing.Any] | list[bytes]:
|
||||
raise NotImplementedError()
|
||||
|
||||
@property
|
||||
def check_hostname(self) -> bool:
|
||||
return self._ctx.check_hostname
|
||||
|
||||
@check_hostname.setter
|
||||
def check_hostname(self, value: bool) -> None:
|
||||
self._ctx.check_hostname = value
|
||||
|
||||
@property
|
||||
def hostname_checks_common_name(self) -> bool:
|
||||
return self._ctx.hostname_checks_common_name
|
||||
|
||||
@hostname_checks_common_name.setter
|
||||
def hostname_checks_common_name(self, value: bool) -> None:
|
||||
self._ctx.hostname_checks_common_name = value
|
||||
|
||||
@property
|
||||
def keylog_filename(self) -> str:
|
||||
return self._ctx.keylog_filename
|
||||
|
||||
@keylog_filename.setter
|
||||
def keylog_filename(self, value: str) -> None:
|
||||
self._ctx.keylog_filename = value
|
||||
|
||||
@property
|
||||
def maximum_version(self) -> ssl.TLSVersion:
|
||||
return self._ctx.maximum_version
|
||||
|
||||
@maximum_version.setter
|
||||
def maximum_version(self, value: ssl.TLSVersion) -> None:
|
||||
_original_super_SSLContext.maximum_version.__set__( # type: ignore[attr-defined]
|
||||
self._ctx, value
|
||||
)
|
||||
|
||||
@property
|
||||
def minimum_version(self) -> ssl.TLSVersion:
|
||||
return self._ctx.minimum_version
|
||||
|
||||
@minimum_version.setter
|
||||
def minimum_version(self, value: ssl.TLSVersion) -> None:
|
||||
_original_super_SSLContext.minimum_version.__set__( # type: ignore[attr-defined]
|
||||
self._ctx, value
|
||||
)
|
||||
|
||||
@property
|
||||
def options(self) -> ssl.Options:
|
||||
return self._ctx.options
|
||||
|
||||
@options.setter
|
||||
def options(self, value: ssl.Options) -> None:
|
||||
_original_super_SSLContext.options.__set__( # type: ignore[attr-defined]
|
||||
self._ctx, value
|
||||
)
|
||||
|
||||
@property
|
||||
def post_handshake_auth(self) -> bool:
|
||||
return self._ctx.post_handshake_auth
|
||||
|
||||
@post_handshake_auth.setter
|
||||
def post_handshake_auth(self, value: bool) -> None:
|
||||
self._ctx.post_handshake_auth = value
|
||||
|
||||
@property
|
||||
def protocol(self) -> ssl._SSLMethod:
|
||||
return self._ctx.protocol
|
||||
|
||||
@property
|
||||
def security_level(self) -> int:
|
||||
return self._ctx.security_level
|
||||
|
||||
@property
|
||||
def verify_flags(self) -> ssl.VerifyFlags:
|
||||
return self._ctx.verify_flags
|
||||
|
||||
@verify_flags.setter
|
||||
def verify_flags(self, value: ssl.VerifyFlags) -> None:
|
||||
_original_super_SSLContext.verify_flags.__set__( # type: ignore[attr-defined]
|
||||
self._ctx, value
|
||||
)
|
||||
|
||||
@property
|
||||
def verify_mode(self) -> ssl.VerifyMode:
|
||||
return self._ctx.verify_mode
|
||||
|
||||
@verify_mode.setter
|
||||
def verify_mode(self, value: ssl.VerifyMode) -> None:
|
||||
_original_super_SSLContext.verify_mode.__set__( # type: ignore[attr-defined]
|
||||
self._ctx, value
|
||||
)
|
||||
|
||||
|
||||
def _verify_peercerts(
|
||||
sock_or_sslobj: ssl.SSLSocket | ssl.SSLObject, server_hostname: str | None
|
||||
) -> None:
|
||||
"""
|
||||
Verifies the peer certificates from an SSLSocket or SSLObject
|
||||
against the certificates in the OS trust store.
|
||||
"""
|
||||
sslobj: ssl.SSLObject = sock_or_sslobj # type: ignore[assignment]
|
||||
try:
|
||||
while not hasattr(sslobj, "get_unverified_chain"):
|
||||
sslobj = sslobj._sslobj # type: ignore[attr-defined]
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
# SSLObject.get_unverified_chain() returns 'None'
|
||||
# if the peer sends no certificates. This is common
|
||||
# for the server-side scenario.
|
||||
unverified_chain: typing.Sequence[_ssl.Certificate] = (
|
||||
sslobj.get_unverified_chain() or () # type: ignore[attr-defined]
|
||||
)
|
||||
cert_bytes = [cert.public_bytes(_ssl.ENCODING_DER) for cert in unverified_chain]
|
||||
_verify_peercerts_impl(
|
||||
sock_or_sslobj.context, cert_bytes, server_hostname=server_hostname
|
||||
)
|
|
@ -0,0 +1,501 @@
|
|||
import contextlib
|
||||
import ctypes
|
||||
import platform
|
||||
import ssl
|
||||
import typing
|
||||
from ctypes import (
|
||||
CDLL,
|
||||
POINTER,
|
||||
c_bool,
|
||||
c_char_p,
|
||||
c_int32,
|
||||
c_long,
|
||||
c_uint32,
|
||||
c_ulong,
|
||||
c_void_p,
|
||||
)
|
||||
from ctypes.util import find_library
|
||||
|
||||
from ._ssl_constants import _set_ssl_context_verify_mode
|
||||
|
||||
_mac_version = platform.mac_ver()[0]
|
||||
_mac_version_info = tuple(map(int, _mac_version.split(".")))
|
||||
if _mac_version_info < (10, 8):
|
||||
raise ImportError(
|
||||
f"Only OS X 10.8 and newer are supported, not {_mac_version_info[0]}.{_mac_version_info[1]}"
|
||||
)
|
||||
|
||||
|
||||
def _load_cdll(name: str, macos10_16_path: str) -> CDLL:
|
||||
"""Loads a CDLL by name, falling back to known path on 10.16+"""
|
||||
try:
|
||||
# Big Sur is technically 11 but we use 10.16 due to the Big Sur
|
||||
# beta being labeled as 10.16.
|
||||
path: str | None
|
||||
if _mac_version_info >= (10, 16):
|
||||
path = macos10_16_path
|
||||
else:
|
||||
path = find_library(name)
|
||||
if not path:
|
||||
raise OSError # Caught and reraised as 'ImportError'
|
||||
return CDLL(path, use_errno=True)
|
||||
except OSError:
|
||||
raise ImportError(f"The library {name} failed to load") from None
|
||||
|
||||
|
||||
Security = _load_cdll(
|
||||
"Security", "/System/Library/Frameworks/Security.framework/Security"
|
||||
)
|
||||
CoreFoundation = _load_cdll(
|
||||
"CoreFoundation",
|
||||
"/System/Library/Frameworks/CoreFoundation.framework/CoreFoundation",
|
||||
)
|
||||
|
||||
Boolean = c_bool
|
||||
CFIndex = c_long
|
||||
CFStringEncoding = c_uint32
|
||||
CFData = c_void_p
|
||||
CFString = c_void_p
|
||||
CFArray = c_void_p
|
||||
CFMutableArray = c_void_p
|
||||
CFError = c_void_p
|
||||
CFType = c_void_p
|
||||
CFTypeID = c_ulong
|
||||
CFTypeRef = POINTER(CFType)
|
||||
CFAllocatorRef = c_void_p
|
||||
|
||||
OSStatus = c_int32
|
||||
|
||||
CFErrorRef = POINTER(CFError)
|
||||
CFDataRef = POINTER(CFData)
|
||||
CFStringRef = POINTER(CFString)
|
||||
CFArrayRef = POINTER(CFArray)
|
||||
CFMutableArrayRef = POINTER(CFMutableArray)
|
||||
CFArrayCallBacks = c_void_p
|
||||
CFOptionFlags = c_uint32
|
||||
|
||||
SecCertificateRef = POINTER(c_void_p)
|
||||
SecPolicyRef = POINTER(c_void_p)
|
||||
SecTrustRef = POINTER(c_void_p)
|
||||
SecTrustResultType = c_uint32
|
||||
SecTrustOptionFlags = c_uint32
|
||||
|
||||
try:
|
||||
Security.SecCertificateCreateWithData.argtypes = [CFAllocatorRef, CFDataRef]
|
||||
Security.SecCertificateCreateWithData.restype = SecCertificateRef
|
||||
|
||||
Security.SecCertificateCopyData.argtypes = [SecCertificateRef]
|
||||
Security.SecCertificateCopyData.restype = CFDataRef
|
||||
|
||||
Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p]
|
||||
Security.SecCopyErrorMessageString.restype = CFStringRef
|
||||
|
||||
Security.SecTrustSetAnchorCertificates.argtypes = [SecTrustRef, CFArrayRef]
|
||||
Security.SecTrustSetAnchorCertificates.restype = OSStatus
|
||||
|
||||
Security.SecTrustSetAnchorCertificatesOnly.argtypes = [SecTrustRef, Boolean]
|
||||
Security.SecTrustSetAnchorCertificatesOnly.restype = OSStatus
|
||||
|
||||
Security.SecTrustEvaluate.argtypes = [SecTrustRef, POINTER(SecTrustResultType)]
|
||||
Security.SecTrustEvaluate.restype = OSStatus
|
||||
|
||||
Security.SecPolicyCreateRevocation.argtypes = [CFOptionFlags]
|
||||
Security.SecPolicyCreateRevocation.restype = SecPolicyRef
|
||||
|
||||
Security.SecPolicyCreateSSL.argtypes = [Boolean, CFStringRef]
|
||||
Security.SecPolicyCreateSSL.restype = SecPolicyRef
|
||||
|
||||
Security.SecTrustCreateWithCertificates.argtypes = [
|
||||
CFTypeRef,
|
||||
CFTypeRef,
|
||||
POINTER(SecTrustRef),
|
||||
]
|
||||
Security.SecTrustCreateWithCertificates.restype = OSStatus
|
||||
|
||||
Security.SecTrustGetTrustResult.argtypes = [
|
||||
SecTrustRef,
|
||||
POINTER(SecTrustResultType),
|
||||
]
|
||||
Security.SecTrustGetTrustResult.restype = OSStatus
|
||||
|
||||
Security.SecTrustRef = SecTrustRef # type: ignore[attr-defined]
|
||||
Security.SecTrustResultType = SecTrustResultType # type: ignore[attr-defined]
|
||||
Security.OSStatus = OSStatus # type: ignore[attr-defined]
|
||||
|
||||
kSecRevocationUseAnyAvailableMethod = 3
|
||||
kSecRevocationRequirePositiveResponse = 8
|
||||
|
||||
CoreFoundation.CFRelease.argtypes = [CFTypeRef]
|
||||
CoreFoundation.CFRelease.restype = None
|
||||
|
||||
CoreFoundation.CFGetTypeID.argtypes = [CFTypeRef]
|
||||
CoreFoundation.CFGetTypeID.restype = CFTypeID
|
||||
|
||||
CoreFoundation.CFStringCreateWithCString.argtypes = [
|
||||
CFAllocatorRef,
|
||||
c_char_p,
|
||||
CFStringEncoding,
|
||||
]
|
||||
CoreFoundation.CFStringCreateWithCString.restype = CFStringRef
|
||||
|
||||
CoreFoundation.CFStringGetCStringPtr.argtypes = [CFStringRef, CFStringEncoding]
|
||||
CoreFoundation.CFStringGetCStringPtr.restype = c_char_p
|
||||
|
||||
CoreFoundation.CFStringGetCString.argtypes = [
|
||||
CFStringRef,
|
||||
c_char_p,
|
||||
CFIndex,
|
||||
CFStringEncoding,
|
||||
]
|
||||
CoreFoundation.CFStringGetCString.restype = c_bool
|
||||
|
||||
CoreFoundation.CFDataCreate.argtypes = [CFAllocatorRef, c_char_p, CFIndex]
|
||||
CoreFoundation.CFDataCreate.restype = CFDataRef
|
||||
|
||||
CoreFoundation.CFDataGetLength.argtypes = [CFDataRef]
|
||||
CoreFoundation.CFDataGetLength.restype = CFIndex
|
||||
|
||||
CoreFoundation.CFDataGetBytePtr.argtypes = [CFDataRef]
|
||||
CoreFoundation.CFDataGetBytePtr.restype = c_void_p
|
||||
|
||||
CoreFoundation.CFArrayCreate.argtypes = [
|
||||
CFAllocatorRef,
|
||||
POINTER(CFTypeRef),
|
||||
CFIndex,
|
||||
CFArrayCallBacks,
|
||||
]
|
||||
CoreFoundation.CFArrayCreate.restype = CFArrayRef
|
||||
|
||||
CoreFoundation.CFArrayCreateMutable.argtypes = [
|
||||
CFAllocatorRef,
|
||||
CFIndex,
|
||||
CFArrayCallBacks,
|
||||
]
|
||||
CoreFoundation.CFArrayCreateMutable.restype = CFMutableArrayRef
|
||||
|
||||
CoreFoundation.CFArrayAppendValue.argtypes = [CFMutableArrayRef, c_void_p]
|
||||
CoreFoundation.CFArrayAppendValue.restype = None
|
||||
|
||||
CoreFoundation.CFArrayGetCount.argtypes = [CFArrayRef]
|
||||
CoreFoundation.CFArrayGetCount.restype = CFIndex
|
||||
|
||||
CoreFoundation.CFArrayGetValueAtIndex.argtypes = [CFArrayRef, CFIndex]
|
||||
CoreFoundation.CFArrayGetValueAtIndex.restype = c_void_p
|
||||
|
||||
CoreFoundation.CFErrorGetCode.argtypes = [CFErrorRef]
|
||||
CoreFoundation.CFErrorGetCode.restype = CFIndex
|
||||
|
||||
CoreFoundation.CFErrorCopyDescription.argtypes = [CFErrorRef]
|
||||
CoreFoundation.CFErrorCopyDescription.restype = CFStringRef
|
||||
|
||||
CoreFoundation.kCFAllocatorDefault = CFAllocatorRef.in_dll( # type: ignore[attr-defined]
|
||||
CoreFoundation, "kCFAllocatorDefault"
|
||||
)
|
||||
CoreFoundation.kCFTypeArrayCallBacks = c_void_p.in_dll( # type: ignore[attr-defined]
|
||||
CoreFoundation, "kCFTypeArrayCallBacks"
|
||||
)
|
||||
|
||||
CoreFoundation.CFTypeRef = CFTypeRef # type: ignore[attr-defined]
|
||||
CoreFoundation.CFArrayRef = CFArrayRef # type: ignore[attr-defined]
|
||||
CoreFoundation.CFStringRef = CFStringRef # type: ignore[attr-defined]
|
||||
CoreFoundation.CFErrorRef = CFErrorRef # type: ignore[attr-defined]
|
||||
|
||||
except AttributeError:
|
||||
raise ImportError("Error initializing ctypes") from None
|
||||
|
||||
|
||||
def _handle_osstatus(result: OSStatus, _: typing.Any, args: typing.Any) -> typing.Any:
|
||||
"""
|
||||
Raises an error if the OSStatus value is non-zero.
|
||||
"""
|
||||
if int(result) == 0:
|
||||
return args
|
||||
|
||||
# Returns a CFString which we need to transform
|
||||
# into a UTF-8 Python string.
|
||||
error_message_cfstring = None
|
||||
try:
|
||||
error_message_cfstring = Security.SecCopyErrorMessageString(result, None)
|
||||
|
||||
# First step is convert the CFString into a C string pointer.
|
||||
# We try the fast no-copy way first.
|
||||
error_message_cfstring_c_void_p = ctypes.cast(
|
||||
error_message_cfstring, ctypes.POINTER(ctypes.c_void_p)
|
||||
)
|
||||
message = CoreFoundation.CFStringGetCStringPtr(
|
||||
error_message_cfstring_c_void_p, CFConst.kCFStringEncodingUTF8
|
||||
)
|
||||
|
||||
# Quoting the Apple dev docs:
|
||||
#
|
||||
# "A pointer to a C string or NULL if the internal
|
||||
# storage of theString does not allow this to be
|
||||
# returned efficiently."
|
||||
#
|
||||
# So we need to get our hands dirty.
|
||||
if message is None:
|
||||
buffer = ctypes.create_string_buffer(1024)
|
||||
result = CoreFoundation.CFStringGetCString(
|
||||
error_message_cfstring_c_void_p,
|
||||
buffer,
|
||||
1024,
|
||||
CFConst.kCFStringEncodingUTF8,
|
||||
)
|
||||
if not result:
|
||||
raise OSError("Error copying C string from CFStringRef")
|
||||
message = buffer.value
|
||||
|
||||
finally:
|
||||
if error_message_cfstring is not None:
|
||||
CoreFoundation.CFRelease(error_message_cfstring)
|
||||
|
||||
# If no message can be found for this status we come
|
||||
# up with a generic one that forwards the status code.
|
||||
if message is None or message == "":
|
||||
message = f"SecureTransport operation returned a non-zero OSStatus: {result}"
|
||||
|
||||
raise ssl.SSLError(message)
|
||||
|
||||
|
||||
Security.SecTrustCreateWithCertificates.errcheck = _handle_osstatus # type: ignore[assignment]
|
||||
Security.SecTrustSetAnchorCertificates.errcheck = _handle_osstatus # type: ignore[assignment]
|
||||
Security.SecTrustGetTrustResult.errcheck = _handle_osstatus # type: ignore[assignment]
|
||||
|
||||
|
||||
class CFConst:
|
||||
"""CoreFoundation constants"""
|
||||
|
||||
kCFStringEncodingUTF8 = CFStringEncoding(0x08000100)
|
||||
|
||||
errSecIncompleteCertRevocationCheck = -67635
|
||||
errSecHostNameMismatch = -67602
|
||||
errSecCertificateExpired = -67818
|
||||
errSecNotTrusted = -67843
|
||||
|
||||
|
||||
def _bytes_to_cf_data_ref(value: bytes) -> CFDataRef: # type: ignore[valid-type]
|
||||
return CoreFoundation.CFDataCreate( # type: ignore[no-any-return]
|
||||
CoreFoundation.kCFAllocatorDefault, value, len(value)
|
||||
)
|
||||
|
||||
|
||||
def _bytes_to_cf_string(value: bytes) -> CFString:
|
||||
"""
|
||||
Given a Python binary data, create a CFString.
|
||||
The string must be CFReleased by the caller.
|
||||
"""
|
||||
c_str = ctypes.c_char_p(value)
|
||||
cf_str = CoreFoundation.CFStringCreateWithCString(
|
||||
CoreFoundation.kCFAllocatorDefault,
|
||||
c_str,
|
||||
CFConst.kCFStringEncodingUTF8,
|
||||
)
|
||||
return cf_str # type: ignore[no-any-return]
|
||||
|
||||
|
||||
def _cf_string_ref_to_str(cf_string_ref: CFStringRef) -> str | None: # type: ignore[valid-type]
|
||||
"""
|
||||
Creates a Unicode string from a CFString object. Used entirely for error
|
||||
reporting.
|
||||
Yes, it annoys me quite a lot that this function is this complex.
|
||||
"""
|
||||
|
||||
string = CoreFoundation.CFStringGetCStringPtr(
|
||||
cf_string_ref, CFConst.kCFStringEncodingUTF8
|
||||
)
|
||||
if string is None:
|
||||
buffer = ctypes.create_string_buffer(1024)
|
||||
result = CoreFoundation.CFStringGetCString(
|
||||
cf_string_ref, buffer, 1024, CFConst.kCFStringEncodingUTF8
|
||||
)
|
||||
if not result:
|
||||
raise OSError("Error copying C string from CFStringRef")
|
||||
string = buffer.value
|
||||
if string is not None:
|
||||
string = string.decode("utf-8")
|
||||
return string # type: ignore[no-any-return]
|
||||
|
||||
|
||||
def _der_certs_to_cf_cert_array(certs: list[bytes]) -> CFMutableArrayRef: # type: ignore[valid-type]
|
||||
"""Builds a CFArray of SecCertificateRefs from a list of DER-encoded certificates.
|
||||
Responsibility of the caller to call CoreFoundation.CFRelease on the CFArray.
|
||||
"""
|
||||
cf_array = CoreFoundation.CFArrayCreateMutable(
|
||||
CoreFoundation.kCFAllocatorDefault,
|
||||
0,
|
||||
ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
|
||||
)
|
||||
if not cf_array:
|
||||
raise MemoryError("Unable to allocate memory!")
|
||||
|
||||
for cert_data in certs:
|
||||
cf_data = None
|
||||
sec_cert_ref = None
|
||||
try:
|
||||
cf_data = _bytes_to_cf_data_ref(cert_data)
|
||||
sec_cert_ref = Security.SecCertificateCreateWithData(
|
||||
CoreFoundation.kCFAllocatorDefault, cf_data
|
||||
)
|
||||
CoreFoundation.CFArrayAppendValue(cf_array, sec_cert_ref)
|
||||
finally:
|
||||
if cf_data:
|
||||
CoreFoundation.CFRelease(cf_data)
|
||||
if sec_cert_ref:
|
||||
CoreFoundation.CFRelease(sec_cert_ref)
|
||||
|
||||
return cf_array # type: ignore[no-any-return]
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def _configure_context(ctx: ssl.SSLContext) -> typing.Iterator[None]:
|
||||
check_hostname = ctx.check_hostname
|
||||
verify_mode = ctx.verify_mode
|
||||
ctx.check_hostname = False
|
||||
_set_ssl_context_verify_mode(ctx, ssl.CERT_NONE)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
ctx.check_hostname = check_hostname
|
||||
_set_ssl_context_verify_mode(ctx, verify_mode)
|
||||
|
||||
|
||||
def _verify_peercerts_impl(
    ssl_context: ssl.SSLContext,
    cert_chain: list[bytes],
    server_hostname: str | None = None,
) -> None:
    """Verify the peer's certificate chain using macOS Security.framework.

    Builds a SecTrust object from ``cert_chain`` plus an SSL policy (pinned to
    ``server_hostname`` when given), evaluates it, and raises
    ssl.SSLCertVerificationError if the chain isn't trusted under the policy
    implied by the SSLContext's verify_mode/check_hostname settings.
    """
    certs = None
    policies = None
    trust = None
    cf_error = None
    try:
        # Build the SSL policy, pinned to the hostname when we have one.
        if server_hostname is not None:
            cf_str_hostname = None
            try:
                cf_str_hostname = _bytes_to_cf_string(server_hostname.encode("ascii"))
                ssl_policy = Security.SecPolicyCreateSSL(True, cf_str_hostname)
            finally:
                if cf_str_hostname:
                    CoreFoundation.CFRelease(cf_str_hostname)
        else:
            ssl_policy = Security.SecPolicyCreateSSL(True, None)

        policies = ssl_policy
        if ssl_context.verify_flags & ssl.VERIFY_CRL_CHECK_CHAIN:
            # Add explicit policy requiring positive revocation checks
            policies = CoreFoundation.CFArrayCreateMutable(
                CoreFoundation.kCFAllocatorDefault,
                0,
                ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
            )
            CoreFoundation.CFArrayAppendValue(policies, ssl_policy)
            CoreFoundation.CFRelease(ssl_policy)
            revocation_policy = Security.SecPolicyCreateRevocation(
                kSecRevocationUseAnyAvailableMethod
                | kSecRevocationRequirePositiveResponse
            )
            CoreFoundation.CFArrayAppendValue(policies, revocation_policy)
            CoreFoundation.CFRelease(revocation_policy)
        elif ssl_context.verify_flags & ssl.VERIFY_CRL_CHECK_LEAF:
            raise NotImplementedError("VERIFY_CRL_CHECK_LEAF not implemented for macOS")

        certs = None
        try:
            certs = _der_certs_to_cf_cert_array(cert_chain)

            # Now that we have certificates loaded and a SecPolicy
            # we can finally create a SecTrust object!
            trust = Security.SecTrustRef()
            Security.SecTrustCreateWithCertificates(
                certs, policies, ctypes.byref(trust)
            )

        finally:
            # The certs are now being held by SecTrust so we can
            # release our handles for the array.
            if certs:
                CoreFoundation.CFRelease(certs)

        # If there are additional trust anchors to load we need to transform
        # the list of DER-encoded certificates into a CFArray. Otherwise
        # pass 'None' to signal that we only want system / fetched certificates.
        ctx_ca_certs_der: list[bytes] | None = ssl_context.get_ca_certs(
            binary_form=True
        )
        if ctx_ca_certs_der:
            ctx_ca_certs = None
            try:
                # BUG FIX: this previously converted ``cert_chain`` (the
                # peer's own chain) instead of the context's configured CA
                # certs, so anchors added via load_verify_locations were
                # silently ignored.
                ctx_ca_certs = _der_certs_to_cf_cert_array(ctx_ca_certs_der)
                Security.SecTrustSetAnchorCertificates(trust, ctx_ca_certs)
            finally:
                if ctx_ca_certs:
                    CoreFoundation.CFRelease(ctx_ca_certs)
        else:
            Security.SecTrustSetAnchorCertificates(trust, None)

        cf_error = CoreFoundation.CFErrorRef()
        sec_trust_eval_result = Security.SecTrustEvaluateWithError(
            trust, ctypes.byref(cf_error)
        )
        # sec_trust_eval_result is a bool (0 or 1)
        # where 1 means that the certs are trusted.
        if sec_trust_eval_result == 1:
            is_trusted = True
        elif sec_trust_eval_result == 0:
            is_trusted = False
        else:
            raise ssl.SSLError(
                f"Unknown result from Security.SecTrustEvaluateWithError: {sec_trust_eval_result!r}"
            )

        cf_error_code = 0
        if not is_trusted:
            cf_error_code = CoreFoundation.CFErrorGetCode(cf_error)

            # If the error is a known failure that we're
            # explicitly okay with from SSLContext configuration
            # we can set is_trusted accordingly.
            if ssl_context.verify_mode != ssl.CERT_REQUIRED and (
                cf_error_code == CFConst.errSecNotTrusted
                or cf_error_code == CFConst.errSecCertificateExpired
            ):
                is_trusted = True
            elif (
                not ssl_context.check_hostname
                and cf_error_code == CFConst.errSecHostNameMismatch
            ):
                is_trusted = True

        # If we're still not trusted then we start to
        # construct and raise the SSLCertVerificationError.
        if not is_trusted:
            cf_error_string_ref = None
            try:
                cf_error_string_ref = CoreFoundation.CFErrorCopyDescription(cf_error)

                # Can this ever return 'None' if there's a CFError?
                cf_error_message = (
                    _cf_string_ref_to_str(cf_error_string_ref)
                    or "Certificate verification failed"
                )

                # TODO: Not sure if we need the SecTrustResultType for anything?
                # We only care whether or not it's a success or failure for now.
                sec_trust_result_type = Security.SecTrustResultType()
                Security.SecTrustGetTrustResult(
                    trust, ctypes.byref(sec_trust_result_type)
                )

                err = ssl.SSLCertVerificationError(cf_error_message)
                err.verify_message = cf_error_message
                err.verify_code = cf_error_code
                raise err
            finally:
                if cf_error_string_ref:
                    CoreFoundation.CFRelease(cf_error_string_ref)

    finally:
        # Release the top-level CF objects regardless of outcome.
        if policies:
            CoreFoundation.CFRelease(policies)
        if trust:
            CoreFoundation.CFRelease(trust)
|
|
@ -0,0 +1,66 @@
|
|||
import contextlib
|
||||
import os
|
||||
import re
|
||||
import ssl
|
||||
import typing
|
||||
|
||||
# candidates based on https://github.com/tiran/certifi-system-store by Christian Heimes
# Paths are probed in order; the first existing file wins.
_CA_FILE_CANDIDATES = [
    # Alpine, Arch, Fedora 34+, OpenWRT, RHEL 9+, BSD
    "/etc/ssl/cert.pem",
    # Fedora <= 34, RHEL <= 9, CentOS <= 9
    "/etc/pki/tls/cert.pem",
    # Debian, Ubuntu (requires ca-certificates)
    "/etc/ssl/certs/ca-certificates.crt",
    # SUSE
    "/etc/ssl/ca-bundle.pem",
]

# Matches OpenSSL's hashed-symlink naming scheme inside a capath directory:
# eight hex characters, a dot, then one digit (e.g. "b204d74a.0").
_HASHED_CERT_FILENAME_RE = re.compile(r"^[0-9a-fA-F]{8}\.[0-9]$")
|
||||
|
||||
|
||||
@contextlib.contextmanager
def _configure_context(ctx: ssl.SSLContext) -> typing.Iterator[None]:
    """Load a usable set of CA certificates into *ctx*.

    First checks whether OpenSSL's compiled-in defaults (honoring
    SSL_CERT_FILE / SSL_CERT_DIR via ssl.get_default_verify_paths, which
    also verifies the paths exist) look usable; we additionally require
    that a capath actually contains hashed cert files. If the defaults
    aren't usable, falls back to probing well-known distro CA bundle
    locations.
    """
    paths = ssl.get_default_verify_paths()
    defaults_usable = bool(paths.cafile) or bool(
        paths.capath and _capath_contains_certs(paths.capath)
    )
    if defaults_usable:
        ctx.set_default_verify_paths()
    else:
        # cafile from OpenSSL doesn't exist and capath has no certs:
        # probe the common distro locations instead.
        fallback = next(
            (candidate for candidate in _CA_FILE_CANDIDATES if os.path.isfile(candidate)),
            None,
        )
        if fallback is not None:
            ctx.load_verify_locations(cafile=fallback)

    yield
|
||||
|
||||
|
||||
def _capath_contains_certs(capath: str) -> bool:
|
||||
"""Check whether capath exists and contains certs in the expected format."""
|
||||
if not os.path.isdir(capath):
|
||||
return False
|
||||
for name in os.listdir(capath):
|
||||
if _HASHED_CERT_FILENAME_RE.match(name):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def _verify_peercerts_impl(
|
||||
ssl_context: ssl.SSLContext,
|
||||
cert_chain: list[bytes],
|
||||
server_hostname: str | None = None,
|
||||
) -> None:
|
||||
# This is a no-op because we've enabled SSLContext's built-in
|
||||
# verification via verify_mode=CERT_REQUIRED, and don't need to repeat it.
|
||||
pass
|
|
@ -0,0 +1,31 @@
|
|||
import ssl
import sys
import typing

# Hold on to the original class so we can create it consistently
# even if we inject our own SSLContext into the ssl module.
_original_SSLContext = ssl.SSLContext
# Bound super() proxy so descriptors (e.g. verify_mode) of the *original*
# class can be used even after ssl.SSLContext is replaced.
_original_super_SSLContext = super(_original_SSLContext, _original_SSLContext)

# CPython is known to be good, but non-CPython implementations
# may implement SSLContext differently so to be safe we don't
# subclass the SSLContext.

# This is returned by truststore.SSLContext.__class__()
_truststore_SSLContext_dunder_class: typing.Optional[type]

# This value is the superclass of truststore.SSLContext.
_truststore_SSLContext_super_class: type

if sys.implementation.name == "cpython":
    # On CPython, subclass ssl.SSLContext directly; __class__ needs no override.
    _truststore_SSLContext_super_class = _original_SSLContext
    _truststore_SSLContext_dunder_class = None
else:
    # Elsewhere, compose over a plain object and report ssl.SSLContext
    # as the apparent __class__ instead.
    _truststore_SSLContext_super_class = object
    _truststore_SSLContext_dunder_class = _original_SSLContext
|
||||
|
||||
|
||||
def _set_ssl_context_verify_mode(
    ssl_context: ssl.SSLContext, verify_mode: ssl.VerifyMode
) -> None:
    # Set verify_mode through the original SSLContext's descriptor so the
    # assignment bypasses any overriding property on a wrapper class.
    _original_super_SSLContext.verify_mode.__set__(ssl_context, verify_mode)  # type: ignore[attr-defined]
|
|
@ -0,0 +1,554 @@
|
|||
import contextlib
|
||||
import ssl
|
||||
import typing
|
||||
from ctypes import WinDLL # type: ignore
|
||||
from ctypes import WinError # type: ignore
|
||||
from ctypes import (
|
||||
POINTER,
|
||||
Structure,
|
||||
c_char_p,
|
||||
c_ulong,
|
||||
c_void_p,
|
||||
c_wchar_p,
|
||||
cast,
|
||||
create_unicode_buffer,
|
||||
pointer,
|
||||
sizeof,
|
||||
)
|
||||
from ctypes.wintypes import (
|
||||
BOOL,
|
||||
DWORD,
|
||||
HANDLE,
|
||||
LONG,
|
||||
LPCSTR,
|
||||
LPCVOID,
|
||||
LPCWSTR,
|
||||
LPFILETIME,
|
||||
LPSTR,
|
||||
LPWSTR,
|
||||
)
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from ._ssl_constants import _set_ssl_context_verify_mode
|
||||
|
||||
# Win32 HANDLE aliases used by the crypt32 chain-engine / cert-store APIs.
HCERTCHAINENGINE = HANDLE
HCERTSTORE = HANDLE
HCRYPTPROV_LEGACY = HANDLE


class CERT_CONTEXT(Structure):
    # Mirrors wincrypt.h CERT_CONTEXT: one encoded certificate plus the
    # store that owns it.
    _fields_ = (
        ("dwCertEncodingType", DWORD),
        ("pbCertEncoded", c_void_p),
        ("cbCertEncoded", DWORD),
        ("pCertInfo", c_void_p),
        ("hCertStore", HCERTSTORE),
    )


PCERT_CONTEXT = POINTER(CERT_CONTEXT)
PCCERT_CONTEXT = POINTER(PCERT_CONTEXT)
|
||||
|
||||
|
||||
class CERT_ENHKEY_USAGE(Structure):
    # Mirrors wincrypt.h CERT_ENHKEY_USAGE: counted array of EKU OID strings.
    _fields_ = (
        ("cUsageIdentifier", DWORD),
        ("rgpszUsageIdentifier", POINTER(LPSTR)),
    )


PCERT_ENHKEY_USAGE = POINTER(CERT_ENHKEY_USAGE)


class CERT_USAGE_MATCH(Structure):
    # Mirrors wincrypt.h CERT_USAGE_MATCH: a usage set plus AND/OR match type.
    _fields_ = (
        ("dwType", DWORD),
        ("Usage", CERT_ENHKEY_USAGE),
    )


class CERT_CHAIN_PARA(Structure):
    # Mirrors wincrypt.h CERT_CHAIN_PARA: parameters for CertGetCertificateChain.
    _fields_ = (
        ("cbSize", DWORD),
        ("RequestedUsage", CERT_USAGE_MATCH),
        ("RequestedIssuancePolicy", CERT_USAGE_MATCH),
        ("dwUrlRetrievalTimeout", DWORD),
        ("fCheckRevocationFreshnessTime", BOOL),
        ("dwRevocationFreshnessTime", DWORD),
        ("pftCacheResync", LPFILETIME),
        ("pStrongSignPara", c_void_p),
        ("dwStrongSignFlags", DWORD),
    )


if TYPE_CHECKING:
    # ctypes POINTER results aren't subscriptable types at runtime; give the
    # type checker an equivalent spelling.
    PCERT_CHAIN_PARA = pointer[CERT_CHAIN_PARA]  # type: ignore[misc]
else:
    PCERT_CHAIN_PARA = POINTER(CERT_CHAIN_PARA)
|
||||
|
||||
|
||||
class CERT_TRUST_STATUS(Structure):
    # Mirrors wincrypt.h CERT_TRUST_STATUS: error/info bit flags for a
    # chain or chain element.
    _fields_ = (
        ("dwErrorStatus", DWORD),
        ("dwInfoStatus", DWORD),
    )


class CERT_CHAIN_ELEMENT(Structure):
    # Mirrors wincrypt.h CERT_CHAIN_ELEMENT: one certificate within a
    # simple chain, with its trust status and usage info.
    _fields_ = (
        ("cbSize", DWORD),
        ("pCertContext", PCERT_CONTEXT),
        ("TrustStatus", CERT_TRUST_STATUS),
        ("pRevocationInfo", c_void_p),
        ("pIssuanceUsage", PCERT_ENHKEY_USAGE),
        ("pApplicationUsage", PCERT_ENHKEY_USAGE),
        ("pwszExtendedErrorInfo", LPCWSTR),
    )


PCERT_CHAIN_ELEMENT = POINTER(CERT_CHAIN_ELEMENT)


class CERT_SIMPLE_CHAIN(Structure):
    # Mirrors wincrypt.h CERT_SIMPLE_CHAIN: an ordered array of chain elements.
    _fields_ = (
        ("cbSize", DWORD),
        ("TrustStatus", CERT_TRUST_STATUS),
        ("cElement", DWORD),
        ("rgpElement", POINTER(PCERT_CHAIN_ELEMENT)),
        ("pTrustListInfo", c_void_p),
        ("fHasRevocationFreshnessTime", BOOL),
        ("dwRevocationFreshnessTime", DWORD),
    )


PCERT_SIMPLE_CHAIN = POINTER(CERT_SIMPLE_CHAIN)


class CERT_CHAIN_CONTEXT(Structure):
    # Mirrors wincrypt.h CERT_CHAIN_CONTEXT: the result of
    # CertGetCertificateChain (possibly several simple chains).
    _fields_ = (
        ("cbSize", DWORD),
        ("TrustStatus", CERT_TRUST_STATUS),
        ("cChain", DWORD),
        ("rgpChain", POINTER(PCERT_SIMPLE_CHAIN)),
        ("cLowerQualityChainContext", DWORD),
        ("rgpLowerQualityChainContext", c_void_p),
        ("fHasRevocationFreshnessTime", BOOL),
        ("dwRevocationFreshnessTime", DWORD),
    )


PCERT_CHAIN_CONTEXT = POINTER(CERT_CHAIN_CONTEXT)
PCCERT_CHAIN_CONTEXT = POINTER(PCERT_CHAIN_CONTEXT)
|
||||
|
||||
|
||||
class SSL_EXTRA_CERT_CHAIN_POLICY_PARA(Structure):
    # Mirrors wincrypt.h SSL_EXTRA_CERT_CHAIN_POLICY_PARA: SSL-specific
    # extras (auth type, hostname) for CertVerifyCertificateChainPolicy.
    _fields_ = (
        ("cbSize", DWORD),
        ("dwAuthType", DWORD),
        ("fdwChecks", DWORD),
        ("pwszServerName", LPCWSTR),
    )


class CERT_CHAIN_POLICY_PARA(Structure):
    # Mirrors wincrypt.h CERT_CHAIN_POLICY_PARA: policy flags plus a
    # pointer to the SSL extras above.
    _fields_ = (
        ("cbSize", DWORD),
        ("dwFlags", DWORD),
        ("pvExtraPolicyPara", c_void_p),
    )


PCERT_CHAIN_POLICY_PARA = POINTER(CERT_CHAIN_POLICY_PARA)


class CERT_CHAIN_POLICY_STATUS(Structure):
    # Mirrors wincrypt.h CERT_CHAIN_POLICY_STATUS: verification outcome;
    # dwError == 0 means the chain passed the policy.
    _fields_ = (
        ("cbSize", DWORD),
        ("dwError", DWORD),
        ("lChainIndex", LONG),
        ("lElementIndex", LONG),
        ("pvExtraPolicyStatus", c_void_p),
    )


PCERT_CHAIN_POLICY_STATUS = POINTER(CERT_CHAIN_POLICY_STATUS)


class CERT_CHAIN_ENGINE_CONFIG(Structure):
    # Mirrors wincrypt.h CERT_CHAIN_ENGINE_CONFIG: configuration for a
    # custom chain engine (hExclusiveRoot lets us trust only our own roots).
    _fields_ = (
        ("cbSize", DWORD),
        ("hRestrictedRoot", HCERTSTORE),
        ("hRestrictedTrust", HCERTSTORE),
        ("hRestrictedOther", HCERTSTORE),
        ("cAdditionalStore", DWORD),
        ("rghAdditionalStore", c_void_p),
        ("dwFlags", DWORD),
        ("dwUrlRetrievalTimeout", DWORD),
        ("MaximumCachedCertificates", DWORD),
        ("CycleDetectionModulus", DWORD),
        ("hExclusiveRoot", HCERTSTORE),
        ("hExclusiveTrustedPeople", HCERTSTORE),
        ("dwExclusiveFlags", DWORD),
    )


PCERT_CHAIN_ENGINE_CONFIG = POINTER(CERT_CHAIN_ENGINE_CONFIG)
PHCERTCHAINENGINE = POINTER(HCERTCHAINENGINE)
|
||||
|
||||
# Certificate encodings and in-memory store parameters (wincrypt.h values).
X509_ASN_ENCODING = 0x00000001
PKCS_7_ASN_ENCODING = 0x00010000
CERT_STORE_PROV_MEMORY = b"Memory"
CERT_STORE_ADD_USE_EXISTING = 2
USAGE_MATCH_TYPE_OR = 1
# Extended key usage OID for TLS server authentication (id-kp-serverAuth).
OID_PKIX_KP_SERVER_AUTH = c_char_p(b"1.3.6.1.5.5.7.3.1")
# Revocation-checking flags for CertGetCertificateChain.
CERT_CHAIN_REVOCATION_CHECK_END_CERT = 0x10000000
CERT_CHAIN_REVOCATION_CHECK_CHAIN = 0x20000000
# Individual relaxation flags for CertVerifyCertificateChainPolicy.
CERT_CHAIN_POLICY_IGNORE_ALL_NOT_TIME_VALID_FLAGS = 0x00000007
CERT_CHAIN_POLICY_IGNORE_INVALID_BASIC_CONSTRAINTS_FLAG = 0x00000008
CERT_CHAIN_POLICY_ALLOW_UNKNOWN_CA_FLAG = 0x00000010
CERT_CHAIN_POLICY_IGNORE_INVALID_NAME_FLAG = 0x00000040
CERT_CHAIN_POLICY_IGNORE_WRONG_USAGE_FLAG = 0x00000020
CERT_CHAIN_POLICY_IGNORE_INVALID_POLICY_FLAG = 0x00000080
CERT_CHAIN_POLICY_IGNORE_ALL_REV_UNKNOWN_FLAGS = 0x00000F00
CERT_CHAIN_POLICY_ALLOW_TESTROOT_FLAG = 0x00008000
CERT_CHAIN_POLICY_TRUST_TESTROOT_FLAG = 0x00004000
AUTHTYPE_SERVER = 2
CERT_CHAIN_POLICY_SSL = 4
# FormatMessageW flags for turning a Win32 error code into text.
FORMAT_MESSAGE_FROM_SYSTEM = 0x00001000
FORMAT_MESSAGE_IGNORE_INSERTS = 0x00000200

# Flags to set for SSLContext.verify_mode=CERT_NONE
CERT_CHAIN_POLICY_VERIFY_MODE_NONE_FLAGS = (
    CERT_CHAIN_POLICY_IGNORE_ALL_NOT_TIME_VALID_FLAGS
    | CERT_CHAIN_POLICY_IGNORE_INVALID_BASIC_CONSTRAINTS_FLAG
    | CERT_CHAIN_POLICY_ALLOW_UNKNOWN_CA_FLAG
    | CERT_CHAIN_POLICY_IGNORE_INVALID_NAME_FLAG
    | CERT_CHAIN_POLICY_IGNORE_WRONG_USAGE_FLAG
    | CERT_CHAIN_POLICY_IGNORE_INVALID_POLICY_FLAG
    | CERT_CHAIN_POLICY_IGNORE_ALL_REV_UNKNOWN_FLAGS
    | CERT_CHAIN_POLICY_ALLOW_TESTROOT_FLAG
    | CERT_CHAIN_POLICY_TRUST_TESTROOT_FLAG
)

# System DLLs providing the cert-chain and message-formatting APIs.
wincrypt = WinDLL("crypt32.dll")
kernel32 = WinDLL("kernel32.dll")
|
||||
|
||||
|
||||
def _handle_win_error(result: bool, _: Any, args: Any) -> Any:
|
||||
if not result:
|
||||
# Note, actually raises OSError after calling GetLastError and FormatMessage
|
||||
raise WinError()
|
||||
return args
|
||||
|
||||
|
||||
# ctypes prototypes for the crypt32 / kernel32 functions used below.
# Functions with errcheck=_handle_win_error raise OSError on failure.

CertCreateCertificateChainEngine = wincrypt.CertCreateCertificateChainEngine
CertCreateCertificateChainEngine.argtypes = (
    PCERT_CHAIN_ENGINE_CONFIG,
    PHCERTCHAINENGINE,
)
CertCreateCertificateChainEngine.errcheck = _handle_win_error

CertOpenStore = wincrypt.CertOpenStore
CertOpenStore.argtypes = (LPCSTR, DWORD, HCRYPTPROV_LEGACY, DWORD, c_void_p)
CertOpenStore.restype = HCERTSTORE
CertOpenStore.errcheck = _handle_win_error

CertAddEncodedCertificateToStore = wincrypt.CertAddEncodedCertificateToStore
CertAddEncodedCertificateToStore.argtypes = (
    HCERTSTORE,
    DWORD,
    c_char_p,
    DWORD,
    DWORD,
    PCCERT_CONTEXT,
)
CertAddEncodedCertificateToStore.restype = BOOL

CertCreateCertificateContext = wincrypt.CertCreateCertificateContext
CertCreateCertificateContext.argtypes = (DWORD, c_char_p, DWORD)
CertCreateCertificateContext.restype = PCERT_CONTEXT
CertCreateCertificateContext.errcheck = _handle_win_error

CertGetCertificateChain = wincrypt.CertGetCertificateChain
CertGetCertificateChain.argtypes = (
    HCERTCHAINENGINE,
    PCERT_CONTEXT,
    LPFILETIME,
    HCERTSTORE,
    PCERT_CHAIN_PARA,
    DWORD,
    c_void_p,
    PCCERT_CHAIN_CONTEXT,
)
CertGetCertificateChain.restype = BOOL
CertGetCertificateChain.errcheck = _handle_win_error

CertVerifyCertificateChainPolicy = wincrypt.CertVerifyCertificateChainPolicy
CertVerifyCertificateChainPolicy.argtypes = (
    c_ulong,
    PCERT_CHAIN_CONTEXT,
    PCERT_CHAIN_POLICY_PARA,
    PCERT_CHAIN_POLICY_STATUS,
)
CertVerifyCertificateChainPolicy.restype = BOOL

CertCloseStore = wincrypt.CertCloseStore
CertCloseStore.argtypes = (HCERTSTORE, DWORD)
CertCloseStore.restype = BOOL
CertCloseStore.errcheck = _handle_win_error

CertFreeCertificateChain = wincrypt.CertFreeCertificateChain
CertFreeCertificateChain.argtypes = (PCERT_CHAIN_CONTEXT,)

CertFreeCertificateContext = wincrypt.CertFreeCertificateContext
CertFreeCertificateContext.argtypes = (PCERT_CONTEXT,)

CertFreeCertificateChainEngine = wincrypt.CertFreeCertificateChainEngine
CertFreeCertificateChainEngine.argtypes = (HCERTCHAINENGINE,)

FormatMessageW = kernel32.FormatMessageW
FormatMessageW.argtypes = (
    DWORD,
    LPCVOID,
    DWORD,
    DWORD,
    LPWSTR,
    DWORD,
    c_void_p,
)
FormatMessageW.restype = DWORD
|
||||
|
||||
|
||||
def _verify_peercerts_impl(
    ssl_context: ssl.SSLContext,
    cert_chain: list[bytes],
    server_hostname: str | None = None,
) -> None:
    """Verify the cert_chain from the server using Windows APIs.

    Tries the default system trust roots first; if verification fails and
    custom CA certs were loaded into the SSLContext, retries with a chain
    engine that exclusively trusts those. Raises
    ssl.SSLCertVerificationError on failure.
    """
    pCertContext = None
    hIntermediateCertStore = CertOpenStore(CERT_STORE_PROV_MEMORY, 0, None, 0, None)
    try:
        # Add intermediate certs to an in-memory cert store
        for cert_bytes in cert_chain[1:]:
            CertAddEncodedCertificateToStore(
                hIntermediateCertStore,
                X509_ASN_ENCODING | PKCS_7_ASN_ENCODING,
                cert_bytes,
                len(cert_bytes),
                CERT_STORE_ADD_USE_EXISTING,
                None,
            )

        # Cert context for leaf cert
        leaf_cert = cert_chain[0]
        pCertContext = CertCreateCertificateContext(
            X509_ASN_ENCODING | PKCS_7_ASN_ENCODING, leaf_cert, len(leaf_cert)
        )

        # Chain params to match certs for serverAuth extended usage
        cert_enhkey_usage = CERT_ENHKEY_USAGE()
        cert_enhkey_usage.cUsageIdentifier = 1
        cert_enhkey_usage.rgpszUsageIdentifier = (c_char_p * 1)(OID_PKIX_KP_SERVER_AUTH)
        cert_usage_match = CERT_USAGE_MATCH()
        cert_usage_match.Usage = cert_enhkey_usage
        chain_params = CERT_CHAIN_PARA()
        chain_params.RequestedUsage = cert_usage_match
        chain_params.cbSize = sizeof(chain_params)
        pChainPara = pointer(chain_params)

        # Map the SSLContext's CRL verify_flags onto chain-building flags.
        if ssl_context.verify_flags & ssl.VERIFY_CRL_CHECK_CHAIN:
            chain_flags = CERT_CHAIN_REVOCATION_CHECK_CHAIN
        elif ssl_context.verify_flags & ssl.VERIFY_CRL_CHECK_LEAF:
            chain_flags = CERT_CHAIN_REVOCATION_CHECK_END_CERT
        else:
            chain_flags = 0

        try:
            # First attempt to verify using the default Windows system trust roots
            # (default chain engine).
            _get_and_verify_cert_chain(
                ssl_context,
                None,
                hIntermediateCertStore,
                pCertContext,
                pChainPara,
                server_hostname,
                chain_flags=chain_flags,
            )
        except ssl.SSLCertVerificationError:
            # If that fails but custom CA certs have been added
            # to the SSLContext using load_verify_locations,
            # try verifying using a custom chain engine
            # that trusts the custom CA certs.
            custom_ca_certs: list[bytes] | None = ssl_context.get_ca_certs(
                binary_form=True
            )
            if custom_ca_certs:
                _verify_using_custom_ca_certs(
                    ssl_context,
                    custom_ca_certs,
                    hIntermediateCertStore,
                    pCertContext,
                    pChainPara,
                    server_hostname,
                    chain_flags=chain_flags,
                )
            else:
                raise
    finally:
        # Always release the store and the leaf cert context.
        CertCloseStore(hIntermediateCertStore, 0)
        if pCertContext:
            CertFreeCertificateContext(pCertContext)
|
||||
|
||||
|
||||
def _get_and_verify_cert_chain(
    ssl_context: ssl.SSLContext,
    hChainEngine: HCERTCHAINENGINE | None,
    hIntermediateCertStore: HCERTSTORE,
    pPeerCertContext: c_void_p,
    pChainPara: PCERT_CHAIN_PARA,  # type: ignore[valid-type]
    server_hostname: str | None,
    chain_flags: int,
) -> None:
    """Build a chain with CertGetCertificateChain and check it against the
    SSL policy via CertVerifyCertificateChainPolicy.

    Raises ssl.SSLCertVerificationError (with verify_code/verify_message
    populated) when the policy check reports an error.
    """
    ppChainContext = None
    try:
        # Get cert chain
        ppChainContext = pointer(PCERT_CHAIN_CONTEXT())
        CertGetCertificateChain(
            hChainEngine,  # chain engine
            pPeerCertContext,  # leaf cert context
            None,  # current system time
            hIntermediateCertStore,  # additional in-memory cert store
            pChainPara,  # chain-building parameters
            chain_flags,
            None,  # reserved
            ppChainContext,  # the resulting chain context
        )
        pChainContext = ppChainContext.contents

        # Verify cert chain
        ssl_extra_cert_chain_policy_para = SSL_EXTRA_CERT_CHAIN_POLICY_PARA()
        ssl_extra_cert_chain_policy_para.cbSize = sizeof(
            ssl_extra_cert_chain_policy_para
        )
        ssl_extra_cert_chain_policy_para.dwAuthType = AUTHTYPE_SERVER
        ssl_extra_cert_chain_policy_para.fdwChecks = 0
        if server_hostname:
            ssl_extra_cert_chain_policy_para.pwszServerName = c_wchar_p(server_hostname)

        chain_policy = CERT_CHAIN_POLICY_PARA()
        chain_policy.pvExtraPolicyPara = cast(
            pointer(ssl_extra_cert_chain_policy_para), c_void_p
        )
        # Relax policy checks to mirror the SSLContext configuration.
        if ssl_context.verify_mode == ssl.CERT_NONE:
            chain_policy.dwFlags |= CERT_CHAIN_POLICY_VERIFY_MODE_NONE_FLAGS
        if not ssl_context.check_hostname:
            chain_policy.dwFlags |= CERT_CHAIN_POLICY_IGNORE_INVALID_NAME_FLAG
        chain_policy.cbSize = sizeof(chain_policy)

        pPolicyPara = pointer(chain_policy)
        policy_status = CERT_CHAIN_POLICY_STATUS()
        policy_status.cbSize = sizeof(policy_status)
        pPolicyStatus = pointer(policy_status)
        CertVerifyCertificateChainPolicy(
            CERT_CHAIN_POLICY_SSL,
            pChainContext,
            pPolicyPara,
            pPolicyStatus,
        )

        # Check status
        error_code = policy_status.dwError
        if error_code:
            # Try getting a human readable message for an error code.
            error_message_buf = create_unicode_buffer(1024)
            error_message_chars = FormatMessageW(
                FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS,
                None,
                error_code,
                0,
                error_message_buf,
                sizeof(error_message_buf),
                None,
            )

            # See if we received a message for the error,
            # otherwise we use a generic error with the
            # error code and hope that it's search-able.
            if error_message_chars <= 0:
                error_message = f"Certificate chain policy error {error_code:#x} [{policy_status.lElementIndex}]"
            else:
                error_message = error_message_buf.value.strip()

            err = ssl.SSLCertVerificationError(error_message)
            err.verify_message = error_message
            err.verify_code = error_code
            raise err from None
    finally:
        # Release the chain context even when raising.
        if ppChainContext:
            CertFreeCertificateChain(ppChainContext.contents)
|
||||
|
||||
|
||||
def _verify_using_custom_ca_certs(
    ssl_context: ssl.SSLContext,
    custom_ca_certs: list[bytes],
    hIntermediateCertStore: HCERTSTORE,
    pPeerCertContext: c_void_p,
    pChainPara: PCERT_CHAIN_PARA,  # type: ignore[valid-type]
    server_hostname: str | None,
    chain_flags: int,
) -> None:
    """Re-verify the peer chain against the SSLContext's own CA certs.

    Builds a chain engine whose hExclusiveRoot store contains only
    ``custom_ca_certs`` so the system roots are bypassed, then delegates
    to _get_and_verify_cert_chain (which raises on failure).
    """
    hChainEngine = None
    hRootCertStore = CertOpenStore(CERT_STORE_PROV_MEMORY, 0, None, 0, None)
    try:
        # Add custom CA certs to an in-memory cert store
        for cert_bytes in custom_ca_certs:
            CertAddEncodedCertificateToStore(
                hRootCertStore,
                X509_ASN_ENCODING | PKCS_7_ASN_ENCODING,
                cert_bytes,
                len(cert_bytes),
                CERT_STORE_ADD_USE_EXISTING,
                None,
            )

        # Create a custom cert chain engine which exclusively trusts
        # certs from our hRootCertStore
        cert_chain_engine_config = CERT_CHAIN_ENGINE_CONFIG()
        cert_chain_engine_config.cbSize = sizeof(cert_chain_engine_config)
        cert_chain_engine_config.hExclusiveRoot = hRootCertStore
        pConfig = pointer(cert_chain_engine_config)
        phChainEngine = pointer(HCERTCHAINENGINE())
        CertCreateCertificateChainEngine(
            pConfig,
            phChainEngine,
        )
        hChainEngine = phChainEngine.contents

        # Get and verify a cert chain using the custom chain engine
        _get_and_verify_cert_chain(
            ssl_context,
            hChainEngine,
            hIntermediateCertStore,
            pPeerCertContext,
            pChainPara,
            server_hostname,
            chain_flags,
        )
    finally:
        # Tear down the engine and store in all cases.
        if hChainEngine:
            CertFreeCertificateChainEngine(hChainEngine)
        CertCloseStore(hRootCertStore, 0)
|
||||
|
||||
|
||||
@contextlib.contextmanager
def _configure_context(ctx: ssl.SSLContext) -> typing.Iterator[None]:
    """Suspend the context's built-in verification for the ``with`` body.

    The Windows chain APIs perform verification instead; the prior
    check_hostname/verify_mode settings are restored on exit.
    """
    prior_settings = (ctx.check_hostname, ctx.verify_mode)
    ctx.check_hostname = False
    _set_ssl_context_verify_mode(ctx, ssl.CERT_NONE)
    try:
        yield
    finally:
        ctx.check_hostname = prior_settings[0]
        _set_ssl_context_verify_mode(ctx, prior_settings[1])
|
|
@ -1,2 +1,2 @@
|
|||
# This file is protected via CODEOWNERS
|
||||
__version__ = "1.26.16"
|
||||
__version__ = "1.26.17"
|
||||
|
|
|
@ -1,6 +1,9 @@
|
|||
from __future__ import absolute_import
|
||||
|
||||
import sys
|
||||
|
||||
from .filepost import encode_multipart_formdata
|
||||
from .packages import six
|
||||
from .packages.six.moves.urllib.parse import urlencode
|
||||
|
||||
__all__ = ["RequestMethods"]
|
||||
|
@ -168,3 +171,21 @@ class RequestMethods(object):
|
|||
extra_kw.update(urlopen_kw)
|
||||
|
||||
return self.urlopen(method, url, **extra_kw)
|
||||
|
||||
|
||||
if not six.PY2:
    # Swap this module's class for a callable subclass so that calling
    # ``urllib3.request(...)`` (a urllib3 v2 API) fails with a helpful
    # message instead of a bare TypeError.

    class RequestModule(sys.modules[__name__].__class__):
        def __call__(self, *args, **kwargs):
            """
            If user tries to call this module directly urllib3 v2.x style raise an error to the user
            suggesting they may need urllib3 v2
            """
            raise TypeError(
                "'module' object is not callable\n"
                "urllib3.request() method is not supported in this release, "
                "upgrade to urllib3 v2 to use it\n"
                "see https://urllib3.readthedocs.io/en/stable/v2-migration-guide.html"
            )

    sys.modules[__name__].__class__ = RequestModule
|
||||
|
|
|
@ -235,7 +235,7 @@ class Retry(object):
|
|||
RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503])
|
||||
|
||||
#: Default headers to be used for ``remove_headers_on_redirect``
|
||||
DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Authorization"])
|
||||
DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Cookie", "Authorization"])
|
||||
|
||||
#: Maximum backoff time.
|
||||
DEFAULT_BACKOFF_MAX = 120
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
CacheControl==0.12.11 # Make sure to update the license in pyproject.toml for this.
|
||||
CacheControl==0.13.1 # Make sure to update the license in pyproject.toml for this.
|
||||
colorama==0.4.6
|
||||
distlib==0.3.6
|
||||
distro==1.8.0
|
||||
|
@ -11,7 +11,7 @@ requests==2.31.0
|
|||
certifi==2023.7.22
|
||||
chardet==5.1.0
|
||||
idna==3.4
|
||||
urllib3==1.26.16
|
||||
urllib3==1.26.17
|
||||
rich==13.4.2
|
||||
pygments==2.15.1
|
||||
typing_extensions==4.7.1
|
||||
|
@ -20,4 +20,5 @@ setuptools==68.0.0
|
|||
six==1.16.0
|
||||
tenacity==8.2.2
|
||||
tomli==2.0.1
|
||||
truststore==0.8.0
|
||||
webencodings==0.5.1
|
||||
|
|
|
@ -1,25 +1,24 @@
|
|||
import compileall
|
||||
import contextlib
|
||||
import fnmatch
|
||||
import http.server
|
||||
import io
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import threading
|
||||
from contextlib import ExitStack, contextmanager
|
||||
from dataclasses import dataclass
|
||||
from enum import Enum
|
||||
from hashlib import sha256
|
||||
from pathlib import Path
|
||||
from textwrap import dedent
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
Any,
|
||||
AnyStr,
|
||||
Callable,
|
||||
ClassVar,
|
||||
ContextManager,
|
||||
Dict,
|
||||
Iterable,
|
||||
Iterator,
|
||||
|
@ -27,7 +26,6 @@ from typing import (
|
|||
Optional,
|
||||
Set,
|
||||
Tuple,
|
||||
Union,
|
||||
)
|
||||
from unittest.mock import patch
|
||||
from zipfile import ZipFile
|
||||
|
@ -46,25 +44,20 @@ from installer.destinations import SchemeDictionaryDestination
|
|||
from installer.sources import WheelFile
|
||||
|
||||
from pip import __file__ as pip_location
|
||||
from pip._internal.cli.main import main as pip_entry_point
|
||||
from pip._internal.locations import _USE_SYSCONFIG
|
||||
from pip._internal.utils.temp_dir import global_tempdir_manager
|
||||
from tests.lib import DATA_DIR, SRC_DIR, PipTestEnvironment, TestData
|
||||
from tests.lib.server import MockServer as _MockServer
|
||||
from tests.lib.server import make_mock_server, server_running
|
||||
from tests.lib import (
|
||||
DATA_DIR,
|
||||
SRC_DIR,
|
||||
CertFactory,
|
||||
InMemoryPip,
|
||||
PipTestEnvironment,
|
||||
ScriptFactory,
|
||||
TestData,
|
||||
)
|
||||
from tests.lib.server import MockServer, make_mock_server
|
||||
from tests.lib.venv import VirtualEnvironment, VirtualEnvironmentType
|
||||
|
||||
from .lib.compat import nullcontext
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Protocol
|
||||
|
||||
from _typeshed.wsgi import WSGIApplication
|
||||
else:
|
||||
# TODO: Protocol was introduced in Python 3.8. Remove this branch when
|
||||
# dropping support for Python 3.7.
|
||||
Protocol = object
|
||||
|
||||
|
||||
def pytest_addoption(parser: Parser) -> None:
|
||||
parser.addoption(
|
||||
|
@ -76,8 +69,8 @@ def pytest_addoption(parser: Parser) -> None:
|
|||
parser.addoption(
|
||||
"--resolver",
|
||||
action="store",
|
||||
default="2020-resolver",
|
||||
choices=["2020-resolver", "legacy"],
|
||||
default="resolvelib",
|
||||
choices=["resolvelib", "legacy"],
|
||||
help="use given resolver in tests",
|
||||
)
|
||||
parser.addoption(
|
||||
|
@ -335,7 +328,7 @@ def scoped_global_tempdir_manager(request: pytest.FixtureRequest) -> Iterator[No
|
|||
temporary directories in the application.
|
||||
"""
|
||||
if "no_auto_tempdir_manager" in request.keywords:
|
||||
ctx = nullcontext
|
||||
ctx: Callable[[], ContextManager[None]] = contextlib.nullcontext
|
||||
else:
|
||||
ctx = global_tempdir_manager
|
||||
|
||||
|
@ -512,16 +505,6 @@ def virtualenv(
|
|||
yield virtualenv_factory(tmpdir.joinpath("workspace", "venv"))
|
||||
|
||||
|
||||
class ScriptFactory(Protocol):
|
||||
def __call__(
|
||||
self,
|
||||
tmpdir: Path,
|
||||
virtualenv: Optional[VirtualEnvironment] = None,
|
||||
environ: Optional[Dict[AnyStr, AnyStr]] = None,
|
||||
) -> PipTestEnvironment:
|
||||
...
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def script_factory(
|
||||
virtualenv_factory: Callable[[Path], VirtualEnvironment],
|
||||
|
@ -641,31 +624,6 @@ def data(tmpdir: Path) -> TestData:
|
|||
return TestData.copy(tmpdir.joinpath("data"))
|
||||
|
||||
|
||||
class InMemoryPipResult:
|
||||
def __init__(self, returncode: int, stdout: str) -> None:
|
||||
self.returncode = returncode
|
||||
self.stdout = stdout
|
||||
|
||||
|
||||
class InMemoryPip:
|
||||
def pip(self, *args: Union[str, Path]) -> InMemoryPipResult:
|
||||
orig_stdout = sys.stdout
|
||||
stdout = io.StringIO()
|
||||
sys.stdout = stdout
|
||||
try:
|
||||
returncode = pip_entry_point([os.fspath(a) for a in args])
|
||||
except SystemExit as e:
|
||||
if isinstance(e.code, int):
|
||||
returncode = e.code
|
||||
elif e.code:
|
||||
returncode = 1
|
||||
else:
|
||||
returncode = 0
|
||||
finally:
|
||||
sys.stdout = orig_stdout
|
||||
return InMemoryPipResult(returncode, stdout.getvalue())
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def in_memory_pip() -> InMemoryPip:
|
||||
return InMemoryPip()
|
||||
|
@ -677,9 +635,6 @@ def deprecated_python() -> bool:
|
|||
return sys.version_info[:2] in []
|
||||
|
||||
|
||||
CertFactory = Callable[[], str]
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def cert_factory(tmpdir_factory: pytest.TempPathFactory) -> CertFactory:
|
||||
# Delay the import requiring cryptography in order to make it possible
|
||||
|
@ -701,49 +656,6 @@ def cert_factory(tmpdir_factory: pytest.TempPathFactory) -> CertFactory:
|
|||
return factory
|
||||
|
||||
|
||||
class MockServer:
|
||||
def __init__(self, server: _MockServer) -> None:
|
||||
self._server = server
|
||||
self._running = False
|
||||
self.context = ExitStack()
|
||||
|
||||
@property
|
||||
def port(self) -> int:
|
||||
return self._server.port
|
||||
|
||||
@property
|
||||
def host(self) -> str:
|
||||
return self._server.host
|
||||
|
||||
def set_responses(self, responses: Iterable["WSGIApplication"]) -> None:
|
||||
assert not self._running, "responses cannot be set on running server"
|
||||
self._server.mock.side_effect = responses
|
||||
|
||||
def start(self) -> None:
|
||||
assert not self._running, "running server cannot be started"
|
||||
self.context.enter_context(server_running(self._server))
|
||||
self.context.enter_context(self._set_running())
|
||||
|
||||
@contextmanager
|
||||
def _set_running(self) -> Iterator[None]:
|
||||
self._running = True
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
self._running = False
|
||||
|
||||
def stop(self) -> None:
|
||||
assert self._running, "idle server cannot be stopped"
|
||||
self.context.close()
|
||||
|
||||
def get_requests(self) -> List[Dict[str, str]]:
|
||||
"""Get environ for each received request."""
|
||||
assert not self._running, "cannot get mock from running server"
|
||||
# Legacy: replace call[0][0] with call.args[0]
|
||||
# when pip drops support for python3.7
|
||||
return [call[0][0] for call in self._server.mock.call_args_list]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_server() -> Iterator[MockServer]:
|
||||
server = make_mock_server()
|
||||
|
@ -1031,7 +943,10 @@ def html_index_with_onetime_server(
|
|||
class InDirectoryServer(http.server.ThreadingHTTPServer):
|
||||
def finish_request(self, request: Any, client_address: Any) -> None:
|
||||
self.RequestHandlerClass(
|
||||
request, client_address, self, directory=str(html_index_for_packages) # type: ignore[call-arg] # noqa: E501
|
||||
request,
|
||||
client_address,
|
||||
self,
|
||||
directory=str(html_index_for_packages), # type: ignore[call-arg]
|
||||
)
|
||||
|
||||
class Handler(OneTimeDownloadHandler):
|
||||
|
|
|
@ -20,7 +20,7 @@ def cache_dir(script: PipTestEnvironment) -> str:
|
|||
|
||||
@pytest.fixture
|
||||
def http_cache_dir(cache_dir: str) -> str:
|
||||
return os.path.normcase(os.path.join(cache_dir, "http"))
|
||||
return os.path.normcase(os.path.join(cache_dir, "http-v2"))
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
|
@ -203,7 +203,10 @@ def test_cache_info(
|
|||
) -> None:
|
||||
result = script.pip("cache", "info")
|
||||
|
||||
assert f"Package index page cache location: {http_cache_dir}" in result.stdout
|
||||
assert (
|
||||
f"Package index page cache location (pip v23.3+): {http_cache_dir}"
|
||||
in result.stdout
|
||||
)
|
||||
assert f"Locally built wheels location: {wheel_cache_dir}" in result.stdout
|
||||
num_wheels = len(wheel_cache_files)
|
||||
assert f"Number of locally built wheels: {num_wheels}" in result.stdout
|
||||
|
|
|
@ -5,8 +5,7 @@ from typing import TYPE_CHECKING, Tuple, Union
|
|||
|
||||
import pytest
|
||||
|
||||
from tests.conftest import ScriptFactory
|
||||
from tests.lib import PipTestEnvironment, TestData, TestPipResult
|
||||
from tests.lib import PipTestEnvironment, ScriptFactory, TestData, TestPipResult
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Protocol
|
||||
|
@ -45,9 +44,18 @@ complete -fa "(__fish_complete_pip)" -c pip""",
|
|||
"zsh",
|
||||
"""\
|
||||
#compdef -P pip[0-9.]#
|
||||
__pip() {
|
||||
compadd $( COMP_WORDS="$words[*]" \\
|
||||
COMP_CWORD=$((CURRENT-1)) \\
|
||||
PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null )""",
|
||||
PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null )
|
||||
}
|
||||
if [[ $zsh_eval_context[-1] == loadautofunc ]]; then
|
||||
# autoload from fpath, call function directly
|
||||
__pip "$@"
|
||||
else
|
||||
# eval/source/. command, register function for later
|
||||
compdef __pip -P 'pip[0-9.]#'
|
||||
fi""",
|
||||
),
|
||||
(
|
||||
"powershell",
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue