Compare commits

...

45 Commits

Author SHA1 Message Date
Darshan 4af9cb6b54
Merge 2f299454bf into 2a0acb595c 2023-11-23 15:27:32 +00:00
Darshan 2f299454bf
Update __init__.py 2023-11-23 20:57:23 +05:30
Darshan a69e7c2573
Merge branch 'main' into main 2023-11-23 20:04:57 +05:30
Damian Shaw 2a0acb595c
Update and provide fixes for mypy pre-commit (#12389)
* Update mypy to 1.6.1

* Fix mypy "Source file found twice under different module names" error

* Ignore type of initialized abstract class in tests

* Use more specific type ignore method-assign

* Type ignore for message.get_all

* Remove unused type ignore

* Add SizedBuffer type for xmlrpc.client.Transport subclass

* Add Self type for RequestHandlerClass in test

* Add type ignore for shutil.rmtree onexc handler

* Quote SizedBuffer

* Add news entry

* Remove no longer correct comment

* Update self import

* Also ignore type onerror=handler

* Update news entry

* Update news entry
2023-11-07 09:39:01 +00:00
Damian Shaw 68529081c2
Enforce f-strings via Ruff (#12393) 2023-11-07 09:14:56 +00:00
Damian Shaw 9685f64fe8
Update ruff and config (#12390) 2023-11-06 09:30:05 +00:00
Dale fd77ebfc74
Rework the functionality of PIP_CONFIG_FILE (#11850) 2023-10-27 14:59:56 +02:00
efflamlemaillet 6dbd9c68f0
Fix hg: "parse error at 0: not a prefix:" (#12373)
Use the two-hyphen argument `--rev=` instead of `-r=`

Co-authored-by: Efflam Lemaillet <elemaillet@logilab.fr>
Co-authored-by: Pradyun Gedam <pradyunsg@gmail.com>
2023-10-27 11:08:17 +02:00
Stéphane Bidoul 7aaca9f2c4
Merge pull request #12370 from sbidoul/release/23.3.1
Release/23.3.1
2023-10-21 13:05:39 +02:00
Stéphane Bidoul 576dbd813c Bump for development 2023-10-21 12:57:41 +02:00
Stéphane Bidoul 5364f26f96 Bump for release 2023-10-21 12:57:31 +02:00
Itamar Turner-Trauring 5e7cc16c3b
Fix parallel pip cache downloads causing crash (#12364)
Co-authored-by: Itamar Turner-Trauring <itamar@pythonspeed.com>
2023-10-18 23:14:22 +01:00
Stéphane Bidoul 8a0f77c171
Merge pull request #12355 from sbidoul/build-using-build
Build using `build`
2023-10-18 10:05:23 +02:00
Paul Moore f3620cdb5b
Merge pull request #12363 from pfmoore/safe_isoformat
Handle ISO formats with a trailing Z
2023-10-17 12:29:43 +01:00
Paul Moore fb06d12d5a Handle ISO formats with a trailing Z 2023-10-17 11:07:21 +01:00
Stéphane Bidoul 9f213bf69a
Merge pull request #12356 from sbidoul/clarify-changelog
Clarify changelog
2023-10-15 19:24:55 +02:00
Stéphane Bidoul a982c7bc35 Add a few PEP links in the changelog 2023-10-15 19:19:36 +02:00
Stéphane Bidoul e1e227d7d6 Clarify changelog 2023-10-15 18:48:53 +02:00
Stéphane Bidoul 9b0abc8c40 Build using `build`
Update the build-release nox session to build using `build`
instead of a direct setup.py call.
2023-10-15 18:44:34 +02:00
Stéphane Bidoul 9d4be7802f
Merge pull request #12353 from sbidoul/release/23.3
Release/23.3
2023-10-15 18:16:30 +02:00
Stéphane Bidoul 8ffe890dc5
Merge pull request #12354 from sbidoul/imp-release-docs
Minor docs improvements
2023-10-15 11:25:55 +02:00
Stéphane Bidoul c0cce3ca60 Bump for development 2023-10-15 10:23:09 +02:00
Stéphane Bidoul e3dc91dad9 Bump for release 2023-10-15 10:23:02 +02:00
Stéphane Bidoul 3e85558b10 Update AUTHORS.txt 2023-10-15 10:23:01 +02:00
Stéphane Bidoul 8d0278771c Reclassify news fragment
This is not for the process category, and
probably not significant enough for a feature news entry.
2023-10-15 10:22:52 +02:00
Stéphane Bidoul bf9a9cbdae Mention 'skip news' label in docs 2023-10-15 10:20:24 +02:00
Stéphane Bidoul 8ff33edfc5 Don't mention setuptools in release process docs 2023-10-15 10:08:24 +02:00
Stéphane Bidoul f6ecf406c3
Merge pull request #12350 from sbidoul/readact-collecting-url
Redact URLs in Collecting... logs
2023-10-15 10:02:04 +02:00
Stéphane Bidoul 306086513b
Merge pull request #12335 from edmorley/patch-1
Correct issue number for NEWS entry added by #12197
2023-10-14 16:17:29 +02:00
Stéphane Bidoul 8f0ed32413 Redact URLs in Collecting... logs 2023-10-14 14:02:55 +02:00
Ed Morley d1659b87e4 Correct issue number for NEWS entry added by #12197
The NEWS entry added in PR #12197 referenced issue #12191,
however, the issue it actually fixed was #11847.
2023-10-14 10:13:03 +00:00
Paul Moore 2333ef3b53
Upgrade urllib3 to 1.26.17 (#12343) 2023-10-12 13:12:06 +02:00
Damian Shaw 496b268c1b
Update "Running Tests" documentation (#12334)
Co-authored-by: Paul Moore <p.f.moore@gmail.com>
Co-authored-by: Pradyun Gedam <pradyunsg@gmail.com>
2023-10-11 17:36:40 +02:00
Darshan 08c4664ba1
Merge branch 'main' into main 2023-04-30 20:02:01 +05:30
Darshan 3e72a0b173 Revert "testing linters 2"
This reverts commit 81eb6c6d13.
2023-04-26 20:52:20 +05:30
Darshan a208a51603 Revert "testing linters"
This reverts commit 5d9bbcd5fd.
2023-04-26 20:52:16 +05:30
Darshan 81eb6c6d13 testing linters 2 2023-04-26 20:51:40 +05:30
Darshan 5d9bbcd5fd testing linters
by reverting changes
2023-04-25 19:08:19 +05:30
Darshan 21d6327afb
Merge branch 'pypa:main' into main 2023-04-25 18:25:12 +05:30
Darshan bec1d0644f
Merge branch 'main' into main 2022-11-11 10:51:28 +05:30
darshanip 2c9f8f04c4 attempt commit with ERROR 2022-11-11 10:47:23 +05:30
Tzu-ping Chung f927891b36 Merge branch 'main' into darshanip/main 2022-11-10 17:42:21 +08:00
darshanip b0a5b037ff updated code 2022-11-03 18:46:31 +05:30
pre-commit-ci[bot] 0dc566f06b [pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
2022-10-16 14:43:46 +00:00
darshanpatidar1 61efec2e19 Initial commit 2022-10-16 19:55:11 +05:30
131 changed files with 492 additions and 449 deletions


@ -22,25 +22,26 @@ repos:
- id: black - id: black
- repo: https://github.com/astral-sh/ruff-pre-commit - repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.0.292 rev: v0.1.4
hooks: hooks:
- id: ruff - id: ruff
args: [--fix, --exit-non-zero-on-fix]
- repo: https://github.com/pre-commit/mirrors-mypy - repo: https://github.com/pre-commit/mirrors-mypy
rev: v0.961 rev: v1.6.1
hooks: hooks:
- id: mypy - id: mypy
exclude: tests/data exclude: tests/data
args: ["--pretty", "--show-error-codes"] args: ["--pretty", "--show-error-codes"]
additional_dependencies: [ additional_dependencies: [
'keyring==23.0.1', 'keyring==24.2.0',
'nox==2021.6.12', 'nox==2023.4.22',
'pytest', 'pytest',
'types-docutils==0.18.3', 'types-docutils==0.20.0.3',
'types-setuptools==57.4.14', 'types-setuptools==68.2.0.0',
'types-freezegun==1.1.9', 'types-freezegun==1.1.10',
'types-six==1.16.15', 'types-six==1.16.21.9',
'types-pyyaml==6.0.12.2', 'types-pyyaml==6.0.12.12',
] ]
- repo: https://github.com/pre-commit/pygrep-hooks - repo: https://github.com/pre-commit/pygrep-hooks


@ -20,6 +20,7 @@ Albert-Guan
albertg albertg
Alberto Sottile Alberto Sottile
Aleks Bunin Aleks Bunin
Ales Erjavec
Alethea Flowers Alethea Flowers
Alex Gaynor Alex Gaynor
Alex Grönholm Alex Grönholm
@ -30,6 +31,7 @@ Alex Stachowiak
Alexander Shtyrov Alexander Shtyrov
Alexandre Conrad Alexandre Conrad
Alexey Popravka Alexey Popravka
Aleš Erjavec
Alli Alli
Ami Fischman Ami Fischman
Ananya Maiti Ananya Maiti
@ -196,9 +198,11 @@ David Runge
David Tucker David Tucker
David Wales David Wales
Davidovich Davidovich
ddelange
Deepak Sharma Deepak Sharma
Deepyaman Datta Deepyaman Datta
Denise Yu Denise Yu
dependabot[bot]
derwolfe derwolfe
Desetude Desetude
Devesh Kumar Singh Devesh Kumar Singh
@ -312,6 +316,7 @@ Ilya Baryshev
Inada Naoki Inada Naoki
Ionel Cristian Mărieș Ionel Cristian Mărieș
Ionel Maries Cristian Ionel Maries Cristian
Itamar Turner-Trauring
Ivan Pozdeev Ivan Pozdeev
Jacob Kim Jacob Kim
Jacob Walls Jacob Walls
@ -338,6 +343,7 @@ Jay Graves
Jean-Christophe Fillion-Robin Jean-Christophe Fillion-Robin
Jeff Barber Jeff Barber
Jeff Dairiki Jeff Dairiki
Jeff Widman
Jelmer Vernooij Jelmer Vernooij
jenix21 jenix21
Jeremy Stanley Jeremy Stanley
@ -367,6 +373,7 @@ Joseph Long
Josh Bronson Josh Bronson
Josh Hansen Josh Hansen
Josh Schneier Josh Schneier
Joshua
Juan Luis Cano Rodríguez Juan Luis Cano Rodríguez
Juanjo Bazán Juanjo Bazán
Judah Rand Judah Rand
@ -397,6 +404,7 @@ KOLANICH
kpinc kpinc
Krishna Oza Krishna Oza
Kumar McMillan Kumar McMillan
Kurt McKee
Kyle Persohn Kyle Persohn
lakshmanaram lakshmanaram
Laszlo Kiss-Kollar Laszlo Kiss-Kollar
@ -413,6 +421,7 @@ lorddavidiii
Loren Carvalho Loren Carvalho
Lucas Cimon Lucas Cimon
Ludovic Gasc Ludovic Gasc
Lukas Geiger
Lukas Juhrich Lukas Juhrich
Luke Macken Luke Macken
Luo Jiebin Luo Jiebin
@ -529,6 +538,7 @@ Patrick Jenkins
Patrick Lawson Patrick Lawson
patricktokeeffe patricktokeeffe
Patrik Kopkan Patrik Kopkan
Paul Ganssle
Paul Kehrer Paul Kehrer
Paul Moore Paul Moore
Paul Nasrat Paul Nasrat
@ -609,6 +619,7 @@ ryneeverett
Sachi King Sachi King
Salvatore Rinchiera Salvatore Rinchiera
sandeepkiran-js sandeepkiran-js
Sander Van Balen
Savio Jomton Savio Jomton
schlamar schlamar
Scott Kitterman Scott Kitterman
@ -621,6 +632,7 @@ SeongSoo Cho
Sergey Vasilyev Sergey Vasilyev
Seth Michael Larson Seth Michael Larson
Seth Woodworth Seth Woodworth
Shahar Epstein
Shantanu Shantanu
shireenrao shireenrao
Shivansh-007 Shivansh-007
@ -648,6 +660,7 @@ Steve Kowalik
Steven Myint Steven Myint
Steven Silvester Steven Silvester
stonebig stonebig
studioj
Stéphane Bidoul Stéphane Bidoul
Stéphane Bidoul (ACSONE) Stéphane Bidoul (ACSONE)
Stéphane Klein Stéphane Klein


@ -9,13 +9,80 @@
.. towncrier release notes start .. towncrier release notes start
23.3.1 (2023-10-21)
===================
Bug Fixes
---------
- Handle a timezone indicator of Z when parsing dates in the self check. (`#12338 <https://github.com/pypa/pip/issues/12338>`_)
- Fix bug where installing the same package at the same time with multiple pip processes could fail. (`#12361 <https://github.com/pypa/pip/issues/12361>`_)
23.3 (2023-10-15)
=================
Process
-------
- Added reference to `vulnerability reporting guidelines <https://www.python.org/dev/security/>`_ to pip's security policy.
Deprecations and Removals
-------------------------
- Drop a fallback to using SecureTransport on macOS. It was useful when pip detected OpenSSL older than 1.0.1, but the current pip does not support any Python version supporting such old OpenSSL versions. (`#12175 <https://github.com/pypa/pip/issues/12175>`_)
Features
--------
- Improve extras resolution for multiple constraints on same base package. (`#11924 <https://github.com/pypa/pip/issues/11924>`_)
- Improve use of datastructures to make candidate selection 1.6x faster. (`#12204 <https://github.com/pypa/pip/issues/12204>`_)
- Allow ``pip install --dry-run`` to use platform and ABI overriding options. (`#12215 <https://github.com/pypa/pip/issues/12215>`_)
- Add ``is_yanked`` boolean entry to the installation report (``--report``) to indicate whether the requirement was yanked from the index, but was still selected by pip conform to :pep:`592`. (`#12224 <https://github.com/pypa/pip/issues/12224>`_)
Bug Fixes
---------
- Ignore errors in temporary directory cleanup (show a warning instead). (`#11394 <https://github.com/pypa/pip/issues/11394>`_)
- Normalize extras according to :pep:`685` from package metadata in the resolver
for comparison. This ensures extras are correctly compared and merged as long
as the package providing the extra(s) is built with values normalized according
to the standard. Note, however, that this *does not* solve cases where the
package itself contains unnormalized extra values in the metadata. (`#11649 <https://github.com/pypa/pip/issues/11649>`_)
- Prevent downloading sdists twice when :pep:`658` metadata is present. (`#11847 <https://github.com/pypa/pip/issues/11847>`_)
- Include all requested extras in the install report (``--report``). (`#11924 <https://github.com/pypa/pip/issues/11924>`_)
- Removed uses of ``datetime.datetime.utcnow`` from non-vendored code. (`#12005 <https://github.com/pypa/pip/issues/12005>`_)
- Consistently report whether a dependency comes from an extra. (`#12095 <https://github.com/pypa/pip/issues/12095>`_)
- Fix completion script for zsh (`#12166 <https://github.com/pypa/pip/issues/12166>`_)
- Fix improper handling of the new onexc argument of ``shutil.rmtree()`` in Python 3.12. (`#12187 <https://github.com/pypa/pip/issues/12187>`_)
- Filter out yanked links from the available versions error message: "(from versions: 1.0, 2.0, 3.0)" will not contain yanked versions conform PEP 592. The yanked versions (if any) will be mentioned in a separate error message. (`#12225 <https://github.com/pypa/pip/issues/12225>`_)
- Fix crash when the git version number contains something else than digits and dots. (`#12280 <https://github.com/pypa/pip/issues/12280>`_)
- Use ``-r=...`` instead of ``-r ...`` to specify references with Mercurial. (`#12306 <https://github.com/pypa/pip/issues/12306>`_)
- Redact password from URLs in some additional places. (`#12350 <https://github.com/pypa/pip/issues/12350>`_)
- pip uses less memory when caching large packages. As a result, there is a new on-disk cache format stored in a new directory ($PIP_CACHE_DIR/http-v2). (`#2984 <https://github.com/pypa/pip/issues/2984>`_)
Vendored Libraries
------------------
- Upgrade certifi to 2023.7.22
- Add truststore 0.8.0
- Upgrade urllib3 to 1.26.17
Improved Documentation
----------------------
- Document that ``pip search`` support has been removed from PyPI (`#12059 <https://github.com/pypa/pip/issues/12059>`_)
- Clarify --prefer-binary in CLI and docs (`#12122 <https://github.com/pypa/pip/issues/12122>`_)
- Document that using OS-provided Python can cause pip's test suite to report false failures. (`#12334 <https://github.com/pypa/pip/issues/12334>`_)
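
The 23.3.1 entry above about handling a timezone indicator of ``Z`` describes a small parsing detail worth illustrating. A minimal sketch of the usual technique, offered as an assumption rather than pip's exact self-check code: ``datetime.fromisoformat`` rejects a trailing ``Z`` on Python older than 3.11, so the suffix can be mapped to the equivalent ``+00:00`` offset first.

```python
# Minimal sketch (assumed approach, not necessarily pip's exact code): accept
# ISO-8601 timestamps that end in "Z" by rewriting the suffix to "+00:00",
# which datetime.fromisoformat understands on Python 3.7+.
import datetime


def parse_iso8601(value: str) -> datetime.datetime:
    if value.endswith("Z"):
        value = value[:-1] + "+00:00"
    return datetime.datetime.fromisoformat(value)


print(parse_iso8601("2023-10-21T12:57:31Z"))  # 2023-10-21 12:57:31+00:00
```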
23.2.1 (2023-07-22) 23.2.1 (2023-07-22)
=================== ===================
Bug Fixes Bug Fixes
--------- ---------
- Disable PEP 658 metadata fetching with the legacy resolver. (`#12156 <https://github.com/pypa/pip/issues/12156>`_) - Disable :pep:`658` metadata fetching with the legacy resolver. (`#12156 <https://github.com/pypa/pip/issues/12156>`_)
23.2 (2023-07-15) 23.2 (2023-07-15)
@ -45,11 +112,11 @@ Bug Fixes
--------- ---------
- Fix ``pip completion --zsh``. (`#11417 <https://github.com/pypa/pip/issues/11417>`_) - Fix ``pip completion --zsh``. (`#11417 <https://github.com/pypa/pip/issues/11417>`_)
- Prevent downloading files twice when PEP 658 metadata is present (`#11847 <https://github.com/pypa/pip/issues/11847>`_) - Prevent downloading files twice when :pep:`658` metadata is present (`#11847 <https://github.com/pypa/pip/issues/11847>`_)
- Add permission check before configuration (`#11920 <https://github.com/pypa/pip/issues/11920>`_) - Add permission check before configuration (`#11920 <https://github.com/pypa/pip/issues/11920>`_)
- Fix deprecation warnings in Python 3.12 for usage of shutil.rmtree (`#11957 <https://github.com/pypa/pip/issues/11957>`_) - Fix deprecation warnings in Python 3.12 for usage of shutil.rmtree (`#11957 <https://github.com/pypa/pip/issues/11957>`_)
- Ignore invalid or unreadable ``origin.json`` files in the cache of locally built wheels. (`#11985 <https://github.com/pypa/pip/issues/11985>`_) - Ignore invalid or unreadable ``origin.json`` files in the cache of locally built wheels. (`#11985 <https://github.com/pypa/pip/issues/11985>`_)
- Fix installation of packages with PEP658 metadata using non-canonicalized names (`#12038 <https://github.com/pypa/pip/issues/12038>`_) - Fix installation of packages with :pep:`658` metadata using non-canonicalized names (`#12038 <https://github.com/pypa/pip/issues/12038>`_)
- Correctly parse ``dist-info-metadata`` values from JSON-format index data. (`#12042 <https://github.com/pypa/pip/issues/12042>`_) - Correctly parse ``dist-info-metadata`` values from JSON-format index data. (`#12042 <https://github.com/pypa/pip/issues/12042>`_)
- Fail with an error if the ``--python`` option is specified after the subcommand name. (`#12067 <https://github.com/pypa/pip/issues/12067>`_) - Fail with an error if the ``--python`` option is specified after the subcommand name. (`#12067 <https://github.com/pypa/pip/issues/12067>`_)
- Fix slowness when using ``importlib.metadata`` (the default way for pip to read metadata in Python 3.11+) and there is a large overlap between already installed and to-be-installed packages. (`#12079 <https://github.com/pypa/pip/issues/12079>`_) - Fix slowness when using ``importlib.metadata`` (the default way for pip to read metadata in Python 3.11+) and there is a large overlap between already installed and to-be-installed packages. (`#12079 <https://github.com/pypa/pip/issues/12079>`_)
@ -220,7 +287,7 @@ Features
- Change the hashes in the installation report to be a mapping. Emit the - Change the hashes in the installation report to be a mapping. Emit the
``archive_info.hashes`` dictionary in ``direct_url.json``. (`#11312 <https://github.com/pypa/pip/issues/11312>`_) ``archive_info.hashes`` dictionary in ``direct_url.json``. (`#11312 <https://github.com/pypa/pip/issues/11312>`_)
- Implement logic to read the ``EXTERNALLY-MANAGED`` file as specified in PEP 668. - Implement logic to read the ``EXTERNALLY-MANAGED`` file as specified in :pep:`668`.
This allows a downstream Python distributor to prevent users from using pip to This allows a downstream Python distributor to prevent users from using pip to
modify the externally managed environment. (`#11381 <https://github.com/pypa/pip/issues/11381>`_) modify the externally managed environment. (`#11381 <https://github.com/pypa/pip/issues/11381>`_)
- Enable the use of ``keyring`` found on ``PATH``. This allows ``keyring`` - Enable the use of ``keyring`` found on ``PATH``. This allows ``keyring``
@ -236,7 +303,7 @@ Bug Fixes
- Use the "venv" scheme if available to obtain prefixed lib paths. (`#11598 <https://github.com/pypa/pip/issues/11598>`_) - Use the "venv" scheme if available to obtain prefixed lib paths. (`#11598 <https://github.com/pypa/pip/issues/11598>`_)
- Deprecated a historical ambiguity in how ``egg`` fragments in URL-style - Deprecated a historical ambiguity in how ``egg`` fragments in URL-style
requirements are formatted and handled. ``egg`` fragments that do not look requirements are formatted and handled. ``egg`` fragments that do not look
like PEP 508 names now produce a deprecation warning. (`#11617 <https://github.com/pypa/pip/issues/11617>`_) like :pep:`508` names now produce a deprecation warning. (`#11617 <https://github.com/pypa/pip/issues/11617>`_)
- Fix scripts path in isolated build environment on Debian. (`#11623 <https://github.com/pypa/pip/issues/11623>`_) - Fix scripts path in isolated build environment on Debian. (`#11623 <https://github.com/pypa/pip/issues/11623>`_)
- Make ``pip show`` show the editable location if package is editable (`#11638 <https://github.com/pypa/pip/issues/11638>`_) - Make ``pip show`` show the editable location if package is editable (`#11638 <https://github.com/pypa/pip/issues/11638>`_)
- Stop checking that ``wheel`` is present when ``build-system.requires`` - Stop checking that ``wheel`` is present when ``build-system.requires``


@ -112,7 +112,7 @@ the ``news/`` directory with the extension of ``.trivial.rst``. If you are on a
POSIX like operating system, one can be added by running POSIX like operating system, one can be added by running
``touch news/$(uuidgen).trivial.rst``. On Windows, the same result can be ``touch news/$(uuidgen).trivial.rst``. On Windows, the same result can be
achieved in Powershell using ``New-Item "news/$([guid]::NewGuid()).trivial.rst"``. achieved in Powershell using ``New-Item "news/$([guid]::NewGuid()).trivial.rst"``.
Core committers may also add a "trivial" label to the PR which will accomplish Core committers may also add a "skip news" label to the PR which will accomplish
the same thing. the same thing.
Upgrading, removing, or adding a new vendored library gets a special mention Upgrading, removing, or adding a new vendored library gets a special mention


@ -73,7 +73,7 @@ pip's tests are written using the :pypi:`pytest` test framework and
:mod:`unittest.mock`. :pypi:`nox` is used to automate the setup and execution :mod:`unittest.mock`. :pypi:`nox` is used to automate the setup and execution
of pip's tests. of pip's tests.
It is preferable to run the tests in parallel for better experience during development, It is preferable to run the tests in parallel for a better experience during development,
since the tests can take a long time to finish when run sequentially. since the tests can take a long time to finish when run sequentially.
To run tests: To run tests:
@ -104,6 +104,15 @@ can select tests using the various ways that pytest provides:
$ # Using keywords $ # Using keywords
$ nox -s test-3.10 -- -k "install and not wheel" $ nox -s test-3.10 -- -k "install and not wheel"
.. note::
When running pip's tests with OS distribution Python versions, be aware that some
functional tests may fail due to potential patches introduced by the distribution.
For all tests to pass consider:
- Installing Python from `python.org`_ or compile from source
- Or, using `pyenv`_ to assist with source compilation
Running pip's entire test suite requires supported version control tools Running pip's entire test suite requires supported version control tools
(subversion, bazaar, git, and mercurial) to be installed. If you are missing (subversion, bazaar, git, and mercurial) to be installed. If you are missing
any of these VCS, those tests should be skipped automatically. You can also any of these VCS, those tests should be skipped automatically. You can also
@ -114,6 +123,9 @@ explicitly tell pytest to skip those tests:
$ nox -s test-3.10 -- -k "not svn" $ nox -s test-3.10 -- -k "not svn"
$ nox -s test-3.10 -- -k "not (svn or git)" $ nox -s test-3.10 -- -k "not (svn or git)"
.. _python.org: https://www.python.org/downloads/
.. _pyenv: https://github.com/pyenv/pyenv
Running Linters Running Linters
=============== ===============

View File

@ -145,8 +145,8 @@ Creating a new release
#. Push the tag created by ``prepare-release``. #. Push the tag created by ``prepare-release``.
#. Regenerate the ``get-pip.py`` script in the `get-pip repository`_ (as #. Regenerate the ``get-pip.py`` script in the `get-pip repository`_ (as
documented there) and commit the results. documented there) and commit the results.
#. Submit a Pull Request to `CPython`_ adding the new version of pip (and upgrading #. Submit a Pull Request to `CPython`_ adding the new version of pip
setuptools) to ``Lib/ensurepip/_bundled``, removing the existing version, and to ``Lib/ensurepip/_bundled``, removing the existing version, and
adjusting the versions listed in ``Lib/ensurepip/__init__.py``. adjusting the versions listed in ``Lib/ensurepip/__init__.py``.


@ -19,8 +19,8 @@ and how they are related to pip's various command line options.
## Configuration Files ## Configuration Files
Configuration files can change the default values for command line option. Configuration files can change the default values for command line options.
They are written using a standard INI style configuration files. The files are written using standard INI format.
pip has 3 "levels" of configuration files: pip has 3 "levels" of configuration files:
@ -28,11 +28,15 @@ pip has 3 "levels" of configuration files:
- `user`: per-user configuration file. - `user`: per-user configuration file.
- `site`: per-environment configuration file; i.e. per-virtualenv. - `site`: per-environment configuration file; i.e. per-virtualenv.
Additionally, environment variables can be specified which will override any of the above.
### Location ### Location
pip's configuration files are located in fairly standard locations. This pip's configuration files are located in fairly standard locations. This
location is different on different operating systems, and has some additional location is different on different operating systems, and has some additional
complexity for backwards compatibility reasons. complexity for backwards compatibility reasons. Note that if user config files
exist in both the legacy and current locations, values in the current file
will override values in the legacy file.
```{tab} Unix ```{tab} Unix
@ -88,9 +92,10 @@ Site
### `PIP_CONFIG_FILE` ### `PIP_CONFIG_FILE`
Additionally, the environment variable `PIP_CONFIG_FILE` can be used to specify Additionally, the environment variable `PIP_CONFIG_FILE` can be used to specify
a configuration file that's loaded first, and whose values are overridden by a configuration file that's loaded last, and whose values override the values
the values set in the aforementioned files. Setting this to {any}`os.devnull` set in the aforementioned files. Setting this to {any}`os.devnull`
disables the loading of _all_ configuration files. disables the loading of _all_ configuration files. Note that if a file exists
at the location that this is set to, the user config file will not be loaded.
(config-precedence)= (config-precedence)=
@ -99,10 +104,10 @@ disables the loading of _all_ configuration files.
When multiple configuration files are found, pip combines them in the following When multiple configuration files are found, pip combines them in the following
order: order:
- `PIP_CONFIG_FILE`, if given.
- Global - Global
- User - User
- Site - Site
- `PIP_CONFIG_FILE`, if given.
Each file read overrides any values read from previous files, so if the Each file read overrides any values read from previous files, so if the
global timeout is specified in both the global file and the per-user file global timeout is specified in both the global file and the per-user file
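
A small illustrative model of the precedence documented in this section (a sketch under assumptions: the option names and values below are made up, and this is not pip's own loading code): each later source overrides keys set by earlier ones, and the file named by ``PIP_CONFIG_FILE``, when given, is applied last.

```python
# Illustrative sketch only: models "later files override earlier ones" in the
# documented order Global -> User -> Site -> PIP_CONFIG_FILE. File contents
# here are hypothetical.
from typing import Dict

global_cfg = {"global.timeout": "60", "global.index-url": "https://pypi.org/simple"}
user_cfg = {"global.timeout": "30"}
site_cfg = {"install.no-binary": ":all:"}
env_file_cfg = {"global.timeout": "10"}  # from the file named by PIP_CONFIG_FILE

merged: Dict[str, str] = {}
for source in (global_cfg, user_cfg, site_cfg, env_file_cfg):  # lowest to highest priority
    merged.update(source)

print(merged["global.timeout"])  # "10" -- the PIP_CONFIG_FILE value wins
```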


@ -194,22 +194,17 @@ class PipReqFileOptionsReference(PipOptions):
opt = option() opt = option()
opt_name = opt._long_opts[0] opt_name = opt._long_opts[0]
if opt._short_opts: if opt._short_opts:
short_opt_name = "{}, ".format(opt._short_opts[0]) short_opt_name = f"{opt._short_opts[0]}, "
else: else:
short_opt_name = "" short_opt_name = ""
if option in cmdoptions.general_group["options"]: if option in cmdoptions.general_group["options"]:
prefix = "" prefix = ""
else: else:
prefix = "{}_".format(self.determine_opt_prefix(opt_name)) prefix = f"{self.determine_opt_prefix(opt_name)}_"
self.view_list.append( self.view_list.append(
"* :ref:`{short}{long}<{prefix}{opt_name}>`".format( f"* :ref:`{short_opt_name}{opt_name}<{prefix}{opt_name}>`",
short=short_opt_name,
long=opt_name,
prefix=prefix,
opt_name=opt_name,
),
"\n", "\n",
) )


@ -1 +0,0 @@
Ignore errors in temporary directory cleanup (show a warning instead).


@ -1,5 +0,0 @@
Normalize extras according to :pep:`685` from package metadata in the resolver
for comparison. This ensures extras are correctly compared and merged as long
as the package providing the extra(s) is built with values normalized according
to the standard. Note, however, that this *does not* solve cases where the
package itself contains unnormalized extra values in the metadata.

news/11815.doc.rst (new file)

@ -0,0 +1 @@
Fix explanation of how PIP_CONFIG_FILE works


@ -1 +0,0 @@
Include all requested extras in the install report (``--report``).


@ -1 +0,0 @@
Improve extras resolution for multiple constraints on same base package.


@ -1 +0,0 @@
Removed uses of ``datetime.datetime.utcnow`` from non-vendored code.


@ -1 +0,0 @@
Document that ``pip search`` support has been removed from PyPI


@ -1 +0,0 @@
Consistently report whether a dependency comes from an extra.


@ -1 +0,0 @@
Clarify --prefer-binary in CLI and docs


@ -1,6 +0,0 @@
The metadata-fetching log message is moved to the VERBOSE level and now hidden
by default. The more significant information in this message to most users are
already available in surrounding logs (the package name and version of the
metadata being fetched), while the URL to the exact metadata file is generally
too long and clutters the output. The message can be brought back with
``--verbose``.


@ -1 +0,0 @@
Fix completion script for zsh


@ -1 +0,0 @@
Drop a fallback to using SecureTransport on macOS. It was useful when pip detected OpenSSL older than 1.0.1, but the current pip does not support any Python version supporting such old OpenSSL versions.


@ -1 +0,0 @@
Add test cases for some behaviors of ``install --dry-run`` and ``--use-feature=fast-deps``.


@ -1 +0,0 @@
Fix improper handling of the new onexc argument of ``shutil.rmtree()`` in Python 3.12.


@ -1 +0,0 @@
Prevent downloading sdists twice when PEP 658 metadata is present.


@ -1 +0,0 @@
Add lots of comments to the ``BuildTracker``.


@ -1 +0,0 @@
Improve use of datastructures to make candidate selection 1.6x faster


@ -1 +0,0 @@
Allow ``pip install --dry-run`` to use platform and ABI overriding options similar to ``--target``.


@ -1 +0,0 @@
Add ``is_yanked`` boolean entry to the installation report (``--report``) to indicate whether the requirement was yanked from the index, but was still selected by pip conform to PEP 592.


@ -1 +0,0 @@
Filter out yanked links from the available versions error message: "(from versions: 1.0, 2.0, 3.0)" will not contain yanked versions conform PEP 592. The yanked versions (if any) will be mentioned in a separate error message.


@ -1 +0,0 @@
Added reference to `vulnerability reporting guidelines <https://www.python.org/dev/security/>`_ to pip's security policy.


@ -1 +0,0 @@
Fix crash when the git version number contains something else than digits and dots.


@ -1 +0,0 @@
Use ``-r=...`` instead of ``-r ...`` to specify references with Mercurial.

news/12389.bugfix.rst (new file)

@ -0,0 +1 @@
Update mypy to 1.6.1 and fix/ignore types

news/12390.trivial.rst (new file)

@ -0,0 +1 @@
Update ruff versions and config for dev

news/12393.trivial.rst (new file)

@ -0,0 +1 @@
Enforce and update code to use f-strings via Ruff rule UP032


@ -1 +0,0 @@
pip uses less memory when caching large packages. As a result, there is a new on-disk cache format stored in a new directory ($PIP_CACHE_DIR/http-v2).


@ -0,0 +1 @@
Fix mercurial revision "parse error": use ``--rev={ref}`` instead of ``-r={ref}``
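
As a rough illustration of this fix (a hedged sketch, not pip's actual Mercurial backend code): the two-hyphen ``--rev=<ref>`` form is parsed reliably by recent Mercurial releases, whereas ``-r=<ref>`` makes hg read ``=<ref>`` as the revision and fail with ``parse error at 0: not a prefix:``.

```python
# Hypothetical sketch of invoking hg with the argument form the fix adopts.
# Only the "--rev=<ref>" spelling matters here; the helper name and layout
# are illustrative, not pip's internals.
import subprocess


def hg_update(repo_dir: str, rev: str) -> None:
    # Two hyphens plus "=" keeps the whole token unambiguous for hg's parser.
    subprocess.run(["hg", "update", "-q", f"--rev={rev}"], cwd=repo_dir, check=True)
```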


@ -1 +0,0 @@
Add ruff rules ASYNC,C4,C90,PERF,PLE,PLR for minor optimizations and to set upper limits on code complexity.

news/5073.bugfix.rst (new file)

@ -0,0 +1 @@
Return error when trying to uninstall package that exist but not uninstallable.


@ -1 +0,0 @@
Upgrade certifi to 2023.7.22


@ -1 +0,0 @@
Add truststore 0.8.0


@ -322,7 +322,7 @@ def build_release(session: nox.Session) -> None:
) )
session.log("# Install dependencies") session.log("# Install dependencies")
session.install("setuptools", "wheel", "twine") session.install("build", "twine")
with release.isolated_temporary_checkout(session, version) as build_dir: with release.isolated_temporary_checkout(session, version) as build_dir:
session.log( session.log(
@ -358,8 +358,7 @@ def build_dists(session: nox.Session) -> List[str]:
) )
session.log("# Build distributions") session.log("# Build distributions")
session.install("setuptools", "wheel") session.run("python", "-m", "build", silent=True)
session.run("python", "setup.py", "sdist", "bdist_wheel", silent=True)
produced_dists = glob.glob("dist/*") produced_dists = glob.glob("dist/*")
session.log(f"# Verify distributions: {', '.join(produced_dists)}") session.log(f"# Verify distributions: {', '.join(produced_dists)}")


@ -84,8 +84,8 @@ ignore = [
"B020", "B020",
"B904", # Ruff enables opinionated warnings by default "B904", # Ruff enables opinionated warnings by default
"B905", # Ruff enables opinionated warnings by default "B905", # Ruff enables opinionated warnings by default
"G202",
] ]
target-version = "py37"
line-length = 88 line-length = 88
select = [ select = [
"ASYNC", "ASYNC",
@ -102,6 +102,7 @@ select = [
"PLR0", "PLR0",
"W", "W",
"RUF100", "RUF100",
"UP032",
] ]
[tool.ruff.isort] [tool.ruff.isort]


@ -77,7 +77,7 @@ setup(
entry_points={ entry_points={
"console_scripts": [ "console_scripts": [
"pip=pip._internal.cli.main:main", "pip=pip._internal.cli.main:main",
"pip{}=pip._internal.cli.main:main".format(sys.version_info[0]), f"pip{sys.version_info[0]}=pip._internal.cli.main:main",
"pip{}.{}=pip._internal.cli.main:main".format(*sys.version_info[:2]), "pip{}.{}=pip._internal.cli.main:main".format(*sys.version_info[:2]),
], ],
}, },


@ -1,6 +1,6 @@
from typing import List, Optional from typing import List, Optional
__version__ = "23.3.dev0" __version__ = "24.0.dev0"
def main(args: Optional[List[str]] = None) -> int: def main(args: Optional[List[str]] = None) -> int:


@ -582,10 +582,7 @@ def _handle_python_version(
""" """
version_info, error_msg = _convert_python_version(value) version_info, error_msg = _convert_python_version(value)
if error_msg is not None: if error_msg is not None:
msg = "invalid --python-version value: {!r}: {}".format( msg = f"invalid --python-version value: {value!r}: {error_msg}"
value,
error_msg,
)
raise_option_error(parser, option=option, msg=msg) raise_option_error(parser, option=option, msg=msg)
parser.values.python_version = version_info parser.values.python_version = version_info
@ -921,9 +918,9 @@ def _handle_merge_hash(
algo, digest = value.split(":", 1) algo, digest = value.split(":", 1)
except ValueError: except ValueError:
parser.error( parser.error(
"Arguments to {} must be a hash name " f"Arguments to {opt_str} must be a hash name "
"followed by a value, like --hash=sha256:" "followed by a value, like --hash=sha256:"
"abcde...".format(opt_str) "abcde..."
) )
if algo not in STRONG_HASHES: if algo not in STRONG_HASHES:
parser.error( parser.error(

View File

@ -229,9 +229,9 @@ class ConfigOptionParser(CustomOptionParser):
val = strtobool(val) val = strtobool(val)
except ValueError: except ValueError:
self.error( self.error(
"{} is not a valid value for {} option, " f"{val} is not a valid value for {key} option, "
"please specify a boolean value like yes/no, " "please specify a boolean value like yes/no, "
"true/false or 1/0 instead.".format(val, key) "true/false or 1/0 instead."
) )
elif option.action == "count": elif option.action == "count":
with suppress(ValueError): with suppress(ValueError):
@ -240,10 +240,10 @@ class ConfigOptionParser(CustomOptionParser):
val = int(val) val = int(val)
if not isinstance(val, int) or val < 0: if not isinstance(val, int) or val < 0:
self.error( self.error(
"{} is not a valid value for {} option, " f"{val} is not a valid value for {key} option, "
"please instead specify either a non-negative integer " "please instead specify either a non-negative integer "
"or a boolean value like yes/no or false/true " "or a boolean value like yes/no or false/true "
"which is equivalent to 1/0.".format(val, key) "which is equivalent to 1/0."
) )
elif option.action == "append": elif option.action == "append":
val = val.split() val = val.split()

View File

@ -175,7 +175,7 @@ class CacheCommand(Command):
files += self._find_http_files(options) files += self._find_http_files(options)
else: else:
# Add the pattern to the log message # Add the pattern to the log message
no_matching_msg += ' for pattern "{}"'.format(args[0]) no_matching_msg += f' for pattern "{args[0]}"'
if not files: if not files:
logger.warning(no_matching_msg) logger.warning(no_matching_msg)

View File

@ -242,17 +242,15 @@ class ConfigurationCommand(Command):
e.filename = editor e.filename = editor
raise raise
except subprocess.CalledProcessError as e: except subprocess.CalledProcessError as e:
raise PipError( raise PipError(f"Editor Subprocess exited with exit code {e.returncode}")
"Editor Subprocess exited with exit code {}".format(e.returncode)
)
def _get_n_args(self, args: List[str], example: str, n: int) -> Any: def _get_n_args(self, args: List[str], example: str, n: int) -> Any:
"""Helper to make sure the command got the right number of arguments""" """Helper to make sure the command got the right number of arguments"""
if len(args) != n: if len(args) != n:
msg = ( msg = (
"Got unexpected number of arguments, expected {}. " f"Got unexpected number of arguments, expected {n}. "
'(example: "{} config {}")' f'(example: "{get_prog()} config {example}")'
).format(n, get_prog(), example) )
raise PipError(msg) raise PipError(msg)
if n == 1: if n == 1:


@ -95,7 +95,7 @@ def show_actual_vendor_versions(vendor_txt_versions: Dict[str, str]) -> None:
elif parse_version(actual_version) != parse_version(expected_version): elif parse_version(actual_version) != parse_version(expected_version):
extra_message = ( extra_message = (
" (CONFLICT: vendor.txt suggests version should" " (CONFLICT: vendor.txt suggests version should"
" be {})".format(expected_version) f" be {expected_version})"
) )
logger.info("%s==%s%s", module_name, actual_version, extra_message) logger.info("%s==%s%s", module_name, actual_version, extra_message)
@ -120,7 +120,7 @@ def show_tags(options: Values) -> None:
if formatted_target: if formatted_target:
suffix = f" (target: {formatted_target})" suffix = f" (target: {formatted_target})"
msg = "Compatible tags: {}{}".format(len(tags), suffix) msg = f"Compatible tags: {len(tags)}{suffix}"
logger.info(msg) logger.info(msg)
if options.verbose < 1 and len(tags) > tag_limit: if options.verbose < 1 and len(tags) > tag_limit:
@ -134,9 +134,7 @@ def show_tags(options: Values) -> None:
logger.info(str(tag)) logger.info(str(tag))
if tags_limited: if tags_limited:
msg = ( msg = f"...\n[First {tag_limit} tags shown. Pass --verbose to show all.]"
"...\n[First {tag_limit} tags shown. Pass --verbose to show all.]"
).format(tag_limit=tag_limit)
logger.info(msg) logger.info(msg)


@ -128,12 +128,12 @@ class IndexCommand(IndexGroupCommand):
if not versions: if not versions:
raise DistributionNotFound( raise DistributionNotFound(
"No matching distribution found for {}".format(query) f"No matching distribution found for {query}"
) )
formatted_versions = [str(ver) for ver in sorted(versions, reverse=True)] formatted_versions = [str(ver) for ver in sorted(versions, reverse=True)]
latest = formatted_versions[0] latest = formatted_versions[0]
write_output("{} ({})".format(query, latest)) write_output(f"{query} ({latest})")
write_output("Available versions: {}".format(", ".join(formatted_versions))) write_output("Available versions: {}".format(", ".join(formatted_versions)))
print_dist_installation_info(query, latest) print_dist_installation_info(query, latest)


@ -607,12 +607,8 @@ class InstallCommand(RequirementCommand):
version = package_set[project_name][0] version = package_set[project_name][0]
for dependency in missing[project_name]: for dependency in missing[project_name]:
message = ( message = (
"{name} {version} requires {requirement}, " f"{project_name} {version} requires {dependency[1]}, "
"which is not installed." "which is not installed."
).format(
name=project_name,
version=version,
requirement=dependency[1],
) )
parts.append(message) parts.append(message)


@ -7,7 +7,7 @@ from pip._vendor.packaging.utils import canonicalize_name
from pip._internal.cli import cmdoptions from pip._internal.cli import cmdoptions
from pip._internal.cli.base_command import Command from pip._internal.cli.base_command import Command
from pip._internal.cli.req_command import SessionCommandMixin, warn_if_run_as_root from pip._internal.cli.req_command import SessionCommandMixin, warn_if_run_as_root
from pip._internal.cli.status_codes import SUCCESS from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.exceptions import InstallationError from pip._internal.exceptions import InstallationError
from pip._internal.req import parse_requirements from pip._internal.req import parse_requirements
from pip._internal.req.constructors import ( from pip._internal.req.constructors import (
@ -108,6 +108,8 @@ class UninstallCommand(Command, SessionCommandMixin):
) )
if uninstall_pathset: if uninstall_pathset:
uninstall_pathset.commit() uninstall_pathset.commit()
if req.files_skipped:
return ERROR
if options.root_user_action == "warn": if options.root_user_action == "warn":
warn_if_run_as_root() warn_if_run_as_root()
return SUCCESS return SUCCESS

View File

@ -59,8 +59,8 @@ def _disassemble_key(name: str) -> List[str]:
if "." not in name: if "." not in name:
error_message = ( error_message = (
"Key does not contain dot separated section and key. " "Key does not contain dot separated section and key. "
"Perhaps you wanted to use 'global.{}' instead?" f"Perhaps you wanted to use 'global.{name}' instead?"
).format(name) )
raise ConfigurationError(error_message) raise ConfigurationError(error_message)
return name.split(".", 1) return name.split(".", 1)
@ -327,33 +327,35 @@ class Configuration:
def iter_config_files(self) -> Iterable[Tuple[Kind, List[str]]]: def iter_config_files(self) -> Iterable[Tuple[Kind, List[str]]]:
"""Yields variant and configuration files associated with it. """Yields variant and configuration files associated with it.
This should be treated like items of a dictionary. This should be treated like items of a dictionary. The order
here doesn't affect what gets overridden. That is controlled
by OVERRIDE_ORDER. However this does control the order they are
displayed to the user. It's probably most ergononmic to display
things in the same order as OVERRIDE_ORDER
""" """
# SMELL: Move the conditions out of this function # SMELL: Move the conditions out of this function
# environment variables have the lowest priority env_config_file = os.environ.get("PIP_CONFIG_FILE", None)
config_file = os.environ.get("PIP_CONFIG_FILE", None)
if config_file is not None:
yield kinds.ENV, [config_file]
else:
yield kinds.ENV, []
config_files = get_configuration_files() config_files = get_configuration_files()
# at the base we have any global configuration
yield kinds.GLOBAL, config_files[kinds.GLOBAL] yield kinds.GLOBAL, config_files[kinds.GLOBAL]
# per-user configuration next # per-user config is not loaded when env_config_file exists
should_load_user_config = not self.isolated and not ( should_load_user_config = not self.isolated and not (
config_file and os.path.exists(config_file) env_config_file and os.path.exists(env_config_file)
) )
if should_load_user_config: if should_load_user_config:
# The legacy config file is overridden by the new config file # The legacy config file is overridden by the new config file
yield kinds.USER, config_files[kinds.USER] yield kinds.USER, config_files[kinds.USER]
# finally virtualenv configuration first trumping others # virtualenv config
yield kinds.SITE, config_files[kinds.SITE] yield kinds.SITE, config_files[kinds.SITE]
if env_config_file is not None:
yield kinds.ENV, [env_config_file]
else:
yield kinds.ENV, []
def get_values_in_config(self, variant: Kind) -> Dict[str, Any]: def get_values_in_config(self, variant: Kind) -> Dict[str, Any]:
"""Get values present in a config file""" """Get values present in a config file"""
return self._config[variant] return self._config[variant]


@ -247,10 +247,7 @@ class NoneMetadataError(PipError):
def __str__(self) -> str: def __str__(self) -> str:
# Use `dist` in the error message because its stringification # Use `dist` in the error message because its stringification
# includes more information, like the version and location. # includes more information, like the version and location.
return "None {} metadata found for distribution: {}".format( return f"None {self.metadata_name} metadata found for distribution: {self.dist}"
self.metadata_name,
self.dist,
)
class UserInstallationInvalid(InstallationError): class UserInstallationInvalid(InstallationError):
@ -594,7 +591,7 @@ class HashMismatch(HashError):
self.gots = gots self.gots = gots
def body(self) -> str: def body(self) -> str:
return " {}:\n{}".format(self._requirement_name(), self._hash_comparison()) return f" {self._requirement_name()}:\n{self._hash_comparison()}"
def _hash_comparison(self) -> str: def _hash_comparison(self) -> str:
""" """
@ -616,11 +613,9 @@ class HashMismatch(HashError):
lines: List[str] = [] lines: List[str] = []
for hash_name, expecteds in self.allowed.items(): for hash_name, expecteds in self.allowed.items():
prefix = hash_then_or(hash_name) prefix = hash_then_or(hash_name)
lines.extend( lines.extend((f" Expected {next(prefix)} {e}") for e in expecteds)
(" Expected {} {}".format(next(prefix), e)) for e in expecteds
)
lines.append( lines.append(
" Got {}\n".format(self.gots[hash_name].hexdigest()) f" Got {self.gots[hash_name].hexdigest()}\n"
) )
return "\n".join(lines) return "\n".join(lines)

View File

@ -533,8 +533,8 @@ class CandidateEvaluator:
) )
except ValueError: except ValueError:
raise UnsupportedWheel( raise UnsupportedWheel(
"{} is not a supported wheel for this platform. It " f"{wheel.filename} is not a supported wheel for this platform. It "
"can't be sorted.".format(wheel.filename) "can't be sorted."
) )
if self._prefer_binary: if self._prefer_binary:
binary_preference = 1 binary_preference = 1
@ -939,9 +939,7 @@ class PackageFinder:
_format_versions(best_candidate_result.iter_all()), _format_versions(best_candidate_result.iter_all()),
) )
raise DistributionNotFound( raise DistributionNotFound(f"No matching distribution found for {req}")
"No matching distribution found for {}".format(req)
)
def _should_install_candidate( def _should_install_candidate(
candidate: Optional[InstallationCandidate], candidate: Optional[InstallationCandidate],

View File

@ -56,8 +56,7 @@ def distutils_scheme(
try: try:
d.parse_config_files() d.parse_config_files()
except UnicodeDecodeError: except UnicodeDecodeError:
# Typeshed does not include find_config_files() for some reason. paths = d.find_config_files()
paths = d.find_config_files() # type: ignore
logger.warning( logger.warning(
"Ignore distutils configs in %s due to encoding errors.", "Ignore distutils configs in %s due to encoding errors.",
", ".join(os.path.basename(p) for p in paths), ", ".join(os.path.basename(p) for p in paths),

View File

@ -64,10 +64,10 @@ def msg_to_json(msg: Message) -> Dict[str, Any]:
key = json_name(field) key = json_name(field)
if multi: if multi:
value: Union[str, List[str]] = [ value: Union[str, List[str]] = [
sanitise_header(v) for v in msg.get_all(field) sanitise_header(v) for v in msg.get_all(field) # type: ignore
] ]
else: else:
value = sanitise_header(msg.get(field)) value = sanitise_header(msg.get(field)) # type: ignore
if key == "keywords": if key == "keywords":
# Accept both comma-separated and space-separated # Accept both comma-separated and space-separated
# forms, for better compatibility with old data. # forms, for better compatibility with old data.

View File

@ -27,8 +27,4 @@ class InstallationCandidate(KeyBasedCompareMixin):
) )
def __str__(self) -> str: def __str__(self) -> str:
return "{!r} candidate (version {} at {})".format( return f"{self.name!r} candidate (version {self.version} at {self.link})"
self.name,
self.version,
self.link,
)


@ -31,9 +31,7 @@ def _get(
value = d[key] value = d[key]
if not isinstance(value, expected_type): if not isinstance(value, expected_type):
raise DirectUrlValidationError( raise DirectUrlValidationError(
"{!r} has unexpected type for {} (expected {})".format( f"{value!r} has unexpected type for {key} (expected {expected_type})"
value, key, expected_type
)
) )
return value return value


@ -33,9 +33,7 @@ class FormatControl:
return all(getattr(self, k) == getattr(other, k) for k in self.__slots__) return all(getattr(self, k) == getattr(other, k) for k in self.__slots__)
def __repr__(self) -> str: def __repr__(self) -> str:
return "{}({}, {})".format( return f"{self.__class__.__name__}({self.no_binary}, {self.only_binary})"
self.__class__.__name__, self.no_binary, self.only_binary
)
@staticmethod @staticmethod
def handle_mutual_excludes(value: str, target: Set[str], other: Set[str]) -> None: def handle_mutual_excludes(value: str, target: Set[str], other: Set[str]) -> None:


@ -368,9 +368,7 @@ class Link(KeyBasedCompareMixin):
else: else:
rp = "" rp = ""
if self.comes_from: if self.comes_from:
return "{} (from {}){}".format( return f"{redact_auth_from_url(self._url)} (from {self.comes_from}){rp}"
redact_auth_from_url(self._url), self.comes_from, rp
)
else: else:
return redact_auth_from_url(str(self._url)) return redact_auth_from_url(str(self._url))


@ -33,6 +33,18 @@ class SafeFileCache(SeparateBodyBaseCache):
""" """
A file based cache which is safe to use even when the target directory may A file based cache which is safe to use even when the target directory may
not be accessible or writable. not be accessible or writable.
There is a race condition when two processes try to write and/or read the
same entry at the same time, since each entry consists of two separate
files (https://github.com/psf/cachecontrol/issues/324). We therefore have
additional logic that makes sure that both files to be present before
returning an entry; this fixes the read side of the race condition.
For the write side, we assume that the server will only ever return the
same data for the same URL, which ought to be the case for files pip is
downloading. PyPI does not have a mechanism to swap out a wheel for
another wheel, for example. If this assumption is not true, the
CacheControl issue will need to be fixed.
""" """
def __init__(self, directory: str) -> None: def __init__(self, directory: str) -> None:
@ -49,9 +61,13 @@ class SafeFileCache(SeparateBodyBaseCache):
return os.path.join(self.directory, *parts) return os.path.join(self.directory, *parts)
def get(self, key: str) -> Optional[bytes]: def get(self, key: str) -> Optional[bytes]:
path = self._get_cache_path(key) # The cache entry is only valid if both metadata and body exist.
metadata_path = self._get_cache_path(key)
body_path = metadata_path + ".body"
if not (os.path.exists(metadata_path) and os.path.exists(body_path)):
return None
with suppressed_cache_errors(): with suppressed_cache_errors():
with open(path, "rb") as f: with open(metadata_path, "rb") as f:
return f.read() return f.read()
def _write(self, path: str, data: bytes) -> None: def _write(self, path: str, data: bytes) -> None:
@ -77,9 +93,13 @@ class SafeFileCache(SeparateBodyBaseCache):
os.remove(path + ".body") os.remove(path + ".body")
def get_body(self, key: str) -> Optional[BinaryIO]: def get_body(self, key: str) -> Optional[BinaryIO]:
path = self._get_cache_path(key) + ".body" # The cache entry is only valid if both metadata and body exist.
metadata_path = self._get_cache_path(key)
body_path = metadata_path + ".body"
if not (os.path.exists(metadata_path) and os.path.exists(body_path)):
return None
with suppressed_cache_errors(): with suppressed_cache_errors():
return open(path, "rb") return open(body_path, "rb")
def set_body(self, key: str, body: bytes) -> None: def set_body(self, key: str, body: bytes) -> None:
path = self._get_cache_path(key) + ".body" path = self._get_cache_path(key) + ".body"


@ -42,7 +42,7 @@ def _prepare_download(
logged_url = redact_auth_from_url(url) logged_url = redact_auth_from_url(url)
if total_length: if total_length:
logged_url = "{} ({})".format(logged_url, format_size(total_length)) logged_url = f"{logged_url} ({format_size(total_length)})"
if is_from_cache(resp): if is_from_cache(resp):
logger.info("Using cached %s", logged_url) logger.info("Using cached %s", logged_url)


@ -13,6 +13,8 @@ from pip._internal.network.utils import raise_for_status
if TYPE_CHECKING: if TYPE_CHECKING:
from xmlrpc.client import _HostType, _Marshallable from xmlrpc.client import _HostType, _Marshallable
from _typeshed import SizedBuffer
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -33,7 +35,7 @@ class PipXmlrpcTransport(xmlrpc.client.Transport):
self, self,
host: "_HostType", host: "_HostType",
handler: str, handler: str,
request_body: bytes, request_body: "SizedBuffer",
verbose: bool = False, verbose: bool = False,
) -> Tuple["_Marshallable", ...]: ) -> Tuple["_Marshallable", ...]:
assert isinstance(host, str) assert isinstance(host, str)


@ -164,16 +164,14 @@ def message_about_scripts_not_on_PATH(scripts: Sequence[str]) -> Optional[str]:
for parent_dir, dir_scripts in warn_for.items(): for parent_dir, dir_scripts in warn_for.items():
sorted_scripts: List[str] = sorted(dir_scripts) sorted_scripts: List[str] = sorted(dir_scripts)
if len(sorted_scripts) == 1: if len(sorted_scripts) == 1:
start_text = "script {} is".format(sorted_scripts[0]) start_text = f"script {sorted_scripts[0]} is"
else: else:
start_text = "scripts {} are".format( start_text = "scripts {} are".format(
", ".join(sorted_scripts[:-1]) + " and " + sorted_scripts[-1] ", ".join(sorted_scripts[:-1]) + " and " + sorted_scripts[-1]
) )
msg_lines.append( msg_lines.append(
"The {} installed in '{}' which is not on PATH.".format( f"The {start_text} installed in '{parent_dir}' which is not on PATH."
start_text, parent_dir
)
) )
last_line_fmt = ( last_line_fmt = (
@ -321,9 +319,7 @@ def get_console_script_specs(console: Dict[str, str]) -> List[str]:
scripts_to_generate.append("pip = " + pip_script) scripts_to_generate.append("pip = " + pip_script)
if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall": if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
scripts_to_generate.append( scripts_to_generate.append(f"pip{sys.version_info[0]} = {pip_script}")
"pip{} = {}".format(sys.version_info[0], pip_script)
)
scripts_to_generate.append(f"pip{get_major_minor_version()} = {pip_script}") scripts_to_generate.append(f"pip{get_major_minor_version()} = {pip_script}")
# Delete any other versioned pip entry points # Delete any other versioned pip entry points
@ -336,9 +332,7 @@ def get_console_script_specs(console: Dict[str, str]) -> List[str]:
scripts_to_generate.append("easy_install = " + easy_install_script) scripts_to_generate.append("easy_install = " + easy_install_script)
scripts_to_generate.append( scripts_to_generate.append(
"easy_install-{} = {}".format( f"easy_install-{get_major_minor_version()} = {easy_install_script}"
get_major_minor_version(), easy_install_script
)
) )
# Delete any other versioned easy_install entry points # Delete any other versioned easy_install entry points
easy_install_ep = [ easy_install_ep = [
@ -408,10 +402,10 @@ class ScriptFile:
class MissingCallableSuffix(InstallationError): class MissingCallableSuffix(InstallationError):
def __init__(self, entry_point: str) -> None: def __init__(self, entry_point: str) -> None:
super().__init__( super().__init__(
"Invalid script entry point: {} - A callable " f"Invalid script entry point: {entry_point} - A callable "
"suffix is required. Cf https://packaging.python.org/" "suffix is required. Cf https://packaging.python.org/"
"specifications/entry-points/#use-for-scripts for more " "specifications/entry-points/#use-for-scripts for more "
"information.".format(entry_point) "information."
) )
@ -712,7 +706,7 @@ def req_error_context(req_description: str) -> Generator[None, None, None]:
try: try:
yield yield
except InstallationError as e: except InstallationError as e:
message = "For req: {}. {}".format(req_description, e.args[0]) message = f"For req: {req_description}. {e.args[0]}"
raise InstallationError(message) from e raise InstallationError(message) from e


@@ -47,6 +47,7 @@ from pip._internal.utils.misc import (
     display_path,
     hash_file,
     hide_url,
+    redact_auth_from_requirement,
 )
 from pip._internal.utils.temp_dir import TempDirectory
 from pip._internal.utils.unpacking import unpack_file
@@ -277,7 +278,7 @@ class RequirementPreparer:
             information = str(display_path(req.link.file_path))
         else:
             message = "Collecting %s"
-            information = str(req.req or req)
+            information = redact_auth_from_requirement(req.req) if req.req else str(req)
 
         # If we used req.req, inject requirement source if available (this
         # would already be included if we used req directly)
@@ -602,8 +603,8 @@ class RequirementPreparer:
                 )
             except NetworkConnectionError as exc:
                 raise InstallationError(
-                    "Could not install requirement {} because of HTTP "
-                    "error {} for URL {}".format(req, exc, link)
+                    f"Could not install requirement {req} because of HTTP "
+                    f"error {exc} for URL {link}"
                 )
         else:
             file_path = self._downloaded[link.url]
@@ -683,9 +684,9 @@ class RequirementPreparer:
         with indent_log():
             if self.require_hashes:
                 raise InstallationError(
-                    "The editable requirement {} cannot be installed when "
+                    f"The editable requirement {req} cannot be installed when "
                     "requiring hashes, because there is no single file to "
-                    "hash.".format(req)
+                    "hash."
                 )
             req.ensure_has_source_dir(self.src_dir)
             req.update_editable()
@@ -713,7 +714,7 @@ class RequirementPreparer:
         assert req.satisfied_by, "req should have been satisfied but isn't"
         assert skip_reason is not None, (
             "did not get skip reason skipped but req.satisfied_by "
-            "is set to {}".format(req.satisfied_by)
+            f"is set to {req.satisfied_by}"
         )
         logger.info(
             "Requirement %s: %s (%s)", skip_reason, req, req.satisfied_by.version

View File

@@ -462,7 +462,7 @@ def install_req_from_req_string(
             raise InstallationError(
                 "Packages installed from PyPI cannot depend on packages "
                 "which are not also hosted on PyPI.\n"
-                "{} depends on {} ".format(comes_from.name, req)
+                f"{comes_from.name} depends on {req} "
             )
 
     return InstallRequirement(

View File

@@ -49,6 +49,7 @@ from pip._internal.utils.misc import (
     display_path,
     hide_url,
     is_installable_dir,
+    redact_auth_from_requirement,
     redact_auth_from_url,
 )
 from pip._internal.utils.packaging import safe_extra
@@ -188,9 +189,9 @@ class InstallRequirement:
     def __str__(self) -> str:
         if self.req:
-            s = str(self.req)
+            s = redact_auth_from_requirement(self.req)
             if self.link:
-                s += " from {}".format(redact_auth_from_url(self.link.url))
+                s += f" from {redact_auth_from_url(self.link.url)}"
         elif self.link:
             s = redact_auth_from_url(self.link.url)
         else:
@@ -220,7 +221,7 @@ class InstallRequirement:
         attributes = vars(self)
         names = sorted(attributes)
-        state = ("{}={!r}".format(attr, attributes[attr]) for attr in sorted(names))
+        state = (f"{attr}={attributes[attr]!r}" for attr in sorted(names))
         return "<{name} object: {{{state}}}>".format(
             name=self.__class__.__name__,
             state=", ".join(state),
@@ -722,6 +723,7 @@ class InstallRequirement:
         uninstalled_pathset = UninstallPathSet.from_dist(dist)
         uninstalled_pathset.remove(auto_confirm, verbose)
+        self.files_skipped = uninstalled_pathset.files_skipped
         return uninstalled_pathset
 
     def _get_archive_name(self, path: str, parentdir: str, rootdir: str) -> str:
@@ -753,8 +755,8 @@ class InstallRequirement:
         if os.path.exists(archive_path):
             response = ask_path_exists(
-                "The file {} exists. (i)gnore, (w)ipe, "
-                "(b)ackup, (a)bort ".format(display_path(archive_path)),
+                f"The file {display_path(archive_path)} exists. (i)gnore, (w)ipe, "
+                "(b)ackup, (a)bort ",
                 ("i", "w", "b", "a"),
             )
             if response == "i":

View File

@@ -71,16 +71,16 @@ def uninstallation_paths(dist: BaseDistribution) -> Generator[str, None, None]:
     entries = dist.iter_declared_entries()
     if entries is None:
-        msg = "Cannot uninstall {dist}, RECORD file not found.".format(dist=dist)
+        msg = f"Cannot uninstall {dist}, RECORD file not found."
         installer = dist.installer
         if not installer or installer == "pip":
-            dep = "{}=={}".format(dist.raw_name, dist.version)
+            dep = f"{dist.raw_name}=={dist.version}"
             msg += (
                 " You might be able to recover from this via: "
-                "'pip install --force-reinstall --no-deps {}'.".format(dep)
+                f"'pip install --force-reinstall --no-deps {dep}'."
             )
         else:
-            msg += " Hint: The package was installed by {}.".format(installer)
+            msg += f" Hint: The package was installed by {installer}."
         raise UninstallationError(msg)
 
     for entry in entries:
@@ -360,12 +360,14 @@ class UninstallPathSet:
     def remove(self, auto_confirm: bool = False, verbose: bool = False) -> None:
         """Remove paths in ``self._paths`` with confirmation (unless
         ``auto_confirm`` is True)."""
+        self.files_skipped = False
 
         if not self._paths:
             logger.info(
                 "Can't uninstall '%s'. No files were found to uninstall.",
                 self._dist.raw_name,
             )
+            self.files_skipped = True
             return
 
         dist_name_version = f"{self._dist.raw_name}-{self._dist.version}"
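
The new files_skipped flag on UninstallPathSet records the case where nothing was found to uninstall, and InstallRequirement.uninstall() copies it onto the requirement (see the req_install.py hunk above). A rough, hypothetical sketch of how a caller could consume it; req is assumed to be an InstallRequirement for an installed distribution:

# Hypothetical usage of the new attribute; the diff itself only sets it.
uninstalled = req.uninstall(auto_confirm=True)
if getattr(req, "files_skipped", False):
    print(f"Nothing was removed for {req}; no files were found to uninstall.")
elif uninstalled is not None:
    uninstalled.commit()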

View File

@@ -231,9 +231,7 @@ class Resolver(BaseResolver):
             tags = compatibility_tags.get_supported()
             if requirement_set.check_supported_wheels and not wheel.supported(tags):
                 raise InstallationError(
-                    "{} is not a supported wheel on this platform.".format(
-                        wheel.filename
-                    )
+                    f"{wheel.filename} is not a supported wheel on this platform."
                 )
 
         # This next bit is really a sanity check.
@@ -287,9 +285,9 @@ class Resolver(BaseResolver):
                 )
                 if does_not_satisfy_constraint:
                     raise InstallationError(
-                        "Could not satisfy constraints for '{}': "
+                        f"Could not satisfy constraints for '{install_req.name}': "
                         "installation from path or url cannot be "
-                        "constrained to a version".format(install_req.name)
+                        "constrained to a version"
                     )
             # If we're now installing a constraint, mark the existing
             # object for real installation.
@@ -398,9 +396,9 @@ class Resolver(BaseResolver):
                 # "UnicodeEncodeError: 'ascii' codec can't encode character"
                 # in Python 2 when the reason contains non-ascii characters.
                 "The candidate selected for download or install is a "
-                "yanked version: {candidate}\n"
-                "Reason for being yanked: {reason}"
-            ).format(candidate=best_candidate, reason=reason)
+                f"yanked version: {best_candidate}\n"
+                f"Reason for being yanked: {reason}"
+            )
             logger.warning(msg)
 
         return link

View File

@@ -159,10 +159,7 @@ class _InstallRequirementBackedCandidate(Candidate):
         return f"{self.name} {self.version}"
 
     def __repr__(self) -> str:
-        return "{class_name}({link!r})".format(
-            class_name=self.__class__.__name__,
-            link=str(self._link),
-        )
+        return f"{self.__class__.__name__}({str(self._link)!r})"
 
     def __hash__(self) -> int:
         return hash((self.__class__, self._link))
@@ -354,10 +351,7 @@ class AlreadyInstalledCandidate(Candidate):
         return str(self.dist)
 
     def __repr__(self) -> str:
-        return "{class_name}({distribution!r})".format(
-            class_name=self.__class__.__name__,
-            distribution=self.dist,
-        )
+        return f"{self.__class__.__name__}({self.dist!r})"
 
     def __hash__(self) -> int:
         return hash((self.__class__, self.name, self.version))
@@ -455,11 +449,7 @@ class ExtrasCandidate(Candidate):
         return "{}[{}] {}".format(name, ",".join(self.extras), rest)
 
     def __repr__(self) -> str:
-        return "{class_name}(base={base!r}, extras={extras!r})".format(
-            class_name=self.__class__.__name__,
-            base=self.base,
-            extras=self.extras,
-        )
+        return f"{self.__class__.__name__}(base={self.base!r}, extras={self.extras!r})"
 
     def __hash__(self) -> int:
         return hash((self.base, self.extras))

View File

@@ -753,8 +753,8 @@ class Factory:
             info = "the requested packages"
 
         msg = (
-            "Cannot install {} because these package versions "
-            "have conflicting dependencies.".format(info)
+            f"Cannot install {info} because these package versions "
+            "have conflicting dependencies."
         )
         logger.critical(msg)
         msg = "\nThe conflict is caused by:"

View File

@@ -15,10 +15,7 @@ class ExplicitRequirement(Requirement):
         return str(self.candidate)
 
     def __repr__(self) -> str:
-        return "{class_name}({candidate!r})".format(
-            class_name=self.__class__.__name__,
-            candidate=self.candidate,
-        )
+        return f"{self.__class__.__name__}({self.candidate!r})"
 
     @property
     def project_name(self) -> NormalizedName:
@@ -50,10 +47,7 @@ class SpecifierRequirement(Requirement):
         return str(self._ireq.req)
 
     def __repr__(self) -> str:
-        return "{class_name}({requirement!r})".format(
-            class_name=self.__class__.__name__,
-            requirement=str(self._ireq.req),
-        )
+        return f"{self.__class__.__name__}({str(self._ireq.req)!r})"
 
     @property
     def project_name(self) -> NormalizedName:
@@ -116,10 +110,7 @@ class RequiresPythonRequirement(Requirement):
         return f"Python {self.specifier}"
 
     def __repr__(self) -> str:
-        return "{class_name}({specifier!r})".format(
-            class_name=self.__class__.__name__,
-            specifier=str(self.specifier),
-        )
+        return f"{self.__class__.__name__}({str(self.specifier)!r})"
 
     @property
     def project_name(self) -> NormalizedName:
@@ -155,10 +146,7 @@ class UnsatisfiableRequirement(Requirement):
         return f"{self._name} (unavailable)"
 
     def __repr__(self) -> str:
-        return "{class_name}({name!r})".format(
-            class_name=self.__class__.__name__,
-            name=str(self._name),
-        )
+        return f"{self.__class__.__name__}({str(self._name)!r})"
 
     @property
     def project_name(self) -> NormalizedName:

View File

@@ -39,6 +39,15 @@ def _get_statefile_name(key: str) -> str:
     return name
 
 
+def _convert_date(isodate: str) -> datetime.datetime:
+    """Convert an ISO format string to a date.
+
+    Handles the format 2020-01-22T14:24:01Z (trailing Z)
+    which is not supported by older versions of fromisoformat.
+    """
+    return datetime.datetime.fromisoformat(isodate.replace("Z", "+00:00"))
+
+
 class SelfCheckState:
     def __init__(self, cache_dir: str) -> None:
         self._state: Dict[str, Any] = {}
@@ -73,7 +82,7 @@ class SelfCheckState:
             return None
 
         # Determine if we need to refresh the state
-        last_check = datetime.datetime.fromisoformat(self._state["last_check"])
+        last_check = _convert_date(self._state["last_check"])
         time_since_last_check = current_time - last_check
         if time_since_last_check > _WEEK:
             return None
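
For background, datetime.fromisoformat() only learned to accept a trailing "Z" (UTC) suffix in Python 3.11, so timestamps like 2020-01-22T14:24:01Z raise ValueError on older interpreters. A minimal reproduction of the problem and of the normalization _convert_date() applies:

import datetime

stamp = "2020-01-22T14:24:01Z"

try:
    datetime.datetime.fromisoformat(stamp)  # ValueError on Python 3.10 and older
except ValueError:
    pass

# Rewriting the suffix as an explicit offset parses on every supported Python.
parsed = datetime.datetime.fromisoformat(stamp.replace("Z", "+00:00"))
print(parsed)  # 2020-01-22 14:24:01+00:00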

View File

@@ -35,6 +35,7 @@ from typing import (
     cast,
 )
 
+from pip._vendor.packaging.requirements import Requirement
 from pip._vendor.pyproject_hooks import BuildBackendHookCaller
 from pip._vendor.tenacity import retry, stop_after_delay, wait_fixed
@@ -76,11 +77,7 @@ def get_pip_version() -> str:
     pip_pkg_dir = os.path.join(os.path.dirname(__file__), "..", "..")
     pip_pkg_dir = os.path.abspath(pip_pkg_dir)
 
-    return "pip {} from {} (python {})".format(
-        __version__,
-        pip_pkg_dir,
-        get_major_minor_version(),
-    )
+    return f"pip {__version__} from {pip_pkg_dir} (python {get_major_minor_version()})"
 
 
 def normalize_version_info(py_version_info: Tuple[int, ...]) -> Tuple[int, int, int]:
@@ -144,9 +141,9 @@ def rmtree(
     )
     if sys.version_info >= (3, 12):
         # See https://docs.python.org/3.12/whatsnew/3.12.html#shutil.
-        shutil.rmtree(dir, onexc=handler)
+        shutil.rmtree(dir, onexc=handler)  # type: ignore
     else:
-        shutil.rmtree(dir, onerror=handler)
+        shutil.rmtree(dir, onerror=handler)  # type: ignore
 
 
 def _onerror_ignore(*_args: Any) -> None:
@@ -278,13 +275,13 @@ def strtobool(val: str) -> int:
 def format_size(bytes: float) -> str:
     if bytes > 1000 * 1000:
-        return "{:.1f} MB".format(bytes / 1000.0 / 1000)
+        return f"{bytes / 1000.0 / 1000:.1f} MB"
     elif bytes > 10 * 1000:
-        return "{} kB".format(int(bytes / 1000))
+        return f"{int(bytes / 1000)} kB"
     elif bytes > 1000:
-        return "{:.1f} kB".format(bytes / 1000.0)
+        return f"{bytes / 1000.0:.1f} kB"
     else:
-        return "{} bytes".format(int(bytes))
+        return f"{int(bytes)} bytes"
@@ -521,9 +518,7 @@ def redact_netloc(netloc: str) -> str:
     else:
         user = urllib.parse.quote(user)
         password = ":****"
-    return "{user}{password}@{netloc}".format(
-        user=user, password=password, netloc=netloc
-    )
+    return f"{user}{password}@{netloc}"
 
 
 def _transform_url(
@@ -578,13 +573,20 @@ def redact_auth_from_url(url: str) -> str:
     return _transform_url(url, _redact_netloc)[0]
 
 
+def redact_auth_from_requirement(req: Requirement) -> str:
+    """Replace the password in a given requirement url with ****."""
+    if not req.url:
+        return str(req)
+    return str(req).replace(req.url, redact_auth_from_url(req.url))
+
+
 class HiddenText:
     def __init__(self, secret: str, redacted: str) -> None:
         self.secret = secret
         self.redacted = redacted
 
     def __repr__(self) -> str:
-        return "<HiddenText {!r}>".format(str(self))
+        return f"<HiddenText {str(self)!r}>"
 
     def __str__(self) -> str:
         return self.redacted
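
In effect, the new redact_auth_from_requirement() helper re-renders a requirement whose direct-reference URL embeds credentials, with the password masked; the prepare.py and req_install.py hunks above switch pip's logging over to it. A rough illustration, assuming a pip checkout that already contains this change (the URL and password are made up):

from pip._vendor.packaging.requirements import Requirement
from pip._internal.utils.misc import redact_auth_from_requirement

# Hypothetical PEP 508 direct reference with an embedded password.
req = Requirement("demo @ https://user:s3cret@example.com/demo-1.0.tar.gz")

# The password is replaced with ****; the output is roughly:
#   demo@ https://user:****@example.com/demo-1.0.tar.gz
print(redact_auth_from_requirement(req))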

View File

@@ -28,7 +28,7 @@ def parse_wheel(wheel_zip: ZipFile, name: str) -> Tuple[str, Message]:
         metadata = wheel_metadata(wheel_zip, info_dir)
         version = wheel_version(metadata)
     except UnsupportedWheel as e:
-        raise UnsupportedWheel("{} has an invalid wheel, {}".format(name, str(e)))
+        raise UnsupportedWheel(f"{name} has an invalid wheel, {str(e)}")
 
     check_compatibility(version, name)
@@ -60,9 +60,7 @@ def wheel_dist_info_dir(source: ZipFile, name: str) -> str:
     canonical_name = canonicalize_name(name)
     if not info_dir_name.startswith(canonical_name):
         raise UnsupportedWheel(
-            ".dist-info directory {!r} does not start with {!r}".format(
-                info_dir, canonical_name
-            )
+            f".dist-info directory {info_dir!r} does not start with {canonical_name!r}"
         )
 
     return info_dir

View File

@@ -31,7 +31,7 @@ class Mercurial(VersionControl):
     @staticmethod
     def get_base_rev_args(rev: str) -> List[str]:
-        return [f"-r={rev}"]
+        return [f"--rev={rev}"]
 
     def fetch_new(
         self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int
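
For context, Mercurial's short -r option does not accept the -r=REV spelling pip was generating: hg ends up parsing the leading "=" as part of the revset and fails with "parse error at 0: not a prefix: =", whereas the long --rev=REV form is unambiguous. A rough sketch of the difference in the resulting command line (the exact hg subcommand pip invokes may differ):

rev = "abc123"  # made-up changeset id

broken = ["hg", "update", "-q", f"-r={rev}"]    # hg: parse error at 0: not a prefix: =
fixed = ["hg", "update", "-q", f"--rev={rev}"]  # parsed as expected

print(broken)
print(fixed)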

View File

@@ -405,9 +405,9 @@ class VersionControl:
         scheme, netloc, path, query, frag = urllib.parse.urlsplit(url)
         if "+" not in scheme:
             raise ValueError(
-                "Sorry, {!r} is a malformed VCS url. "
+                f"Sorry, {url!r} is a malformed VCS url. "
                 "The format is <vcs>+<protocol>://<url>, "
-                "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp".format(url)
+                "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp"
             )
         # Remove the vcs prefix.
         scheme = scheme.split("+", 1)[1]
@@ -417,9 +417,9 @@ class VersionControl:
             path, rev = path.rsplit("@", 1)
             if not rev:
                 raise InstallationError(
-                    "The URL {!r} has an empty revision (after @) "
+                    f"The URL {url!r} has an empty revision (after @) "
                     "which is not supported. Include a revision after @ "
-                    "or remove @ from the URL.".format(url)
+                    "or remove @ from the URL."
                 )
         url = urllib.parse.urlunsplit((scheme, netloc, path, query, ""))
         return url, rev, user_pass
@@ -566,7 +566,7 @@ class VersionControl:
             self.name,
             url,
         )
-        response = ask_path_exists("What to do? {}".format(prompt[0]), prompt[1])
+        response = ask_path_exists(f"What to do? {prompt[0]}", prompt[1])
         if response == "a":
             sys.exit(-1)
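
Both messages above come from get_url_rev_and_auth(), which splits the <vcs>+<protocol> scheme and the optional @revision suffix. A simplified sketch of that parsing, with error handling and auth extraction omitted:

import urllib.parse

url = "git+https://example.com/repo.git@v1.2.3"  # example URL

scheme, netloc, path, query, frag = urllib.parse.urlsplit(url)
assert "+" in scheme  # otherwise: "malformed VCS url"
vcs, protocol = scheme.split("+", 1)

rev = None
if "@" in path:
    path, rev = path.rsplit("@", 1)  # an empty rev here is rejected

print(vcs, protocol, rev)  # git https v1.2.3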

View File

@@ -140,15 +140,15 @@ def _verify_one(req: InstallRequirement, wheel_path: str) -> None:
     w = Wheel(os.path.basename(wheel_path))
     if canonicalize_name(w.name) != canonical_name:
         raise InvalidWheelFilename(
-            "Wheel has unexpected file name: expected {!r}, "
-            "got {!r}".format(canonical_name, w.name),
+            f"Wheel has unexpected file name: expected {canonical_name!r}, "
+            f"got {w.name!r}",
         )
     dist = get_wheel_distribution(FilesystemWheel(wheel_path), canonical_name)
     dist_verstr = str(dist.version)
     if canonicalize_version(dist_verstr) != canonicalize_version(w.version):
         raise InvalidWheelFilename(
-            "Wheel has unexpected file name: expected {!r}, "
-            "got {!r}".format(dist_verstr, w.version),
+            f"Wheel has unexpected file name: expected {dist_verstr!r}, "
+            f"got {w.version!r}",
         )
     metadata_version_value = dist.metadata_version
     if metadata_version_value is None:
@@ -160,8 +160,7 @@ def _verify_one(req: InstallRequirement, wheel_path: str) -> None:
         raise UnsupportedWheel(msg)
     if metadata_version >= Version("1.2") and not isinstance(dist.version, Version):
         raise UnsupportedWheel(
-            "Metadata 1.2 mandates PEP 440 version, "
-            "but {!r} is not".format(dist_verstr)
+            f"Metadata 1.2 mandates PEP 440 version, but {dist_verstr!r} is not"
         )
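
_verify_one() compares the name and version encoded in the wheel filename against the built metadata after canonicalizing both sides, so case or separator differences and trailing zeros do not trip the check. A small illustration with the packaging utilities (pip itself uses its vendored copy); the names and versions are made up:

from packaging.utils import canonicalize_name, canonicalize_version

filename_name, filename_version = "Sample_Pkg", "1.0.0"
metadata_name, metadata_version = "sample-pkg", "1.0"

# canonicalize_name() folds case and separators; canonicalize_version()
# strips trailing zeros, so "1.0.0" and "1.0" compare equal.
assert canonicalize_name(filename_name) == canonicalize_name(metadata_name)
assert canonicalize_version(filename_version) == canonicalize_version(metadata_version)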

View File

@@ -1,2 +1,2 @@
 # This file is protected via CODEOWNERS
-__version__ = "1.26.16"
+__version__ = "1.26.17"

View File

@@ -1,6 +1,9 @@
 from __future__ import absolute_import
 
+import sys
+
 from .filepost import encode_multipart_formdata
+from .packages import six
 from .packages.six.moves.urllib.parse import urlencode
 
 __all__ = ["RequestMethods"]
@@ -168,3 +171,21 @@ class RequestMethods(object):
         extra_kw.update(urlopen_kw)
 
         return self.urlopen(method, url, **extra_kw)
+
+
+if not six.PY2:
+
+    class RequestModule(sys.modules[__name__].__class__):
+        def __call__(self, *args, **kwargs):
+            """
+            If user tries to call this module directly urllib3 v2.x style raise an error to the user
+            suggesting they may need urllib3 v2
+            """
+            raise TypeError(
+                "'module' object is not callable\n"
+                "urllib3.request() method is not supported in this release, "
+                "upgrade to urllib3 v2 to use it\n"
+                "see https://urllib3.readthedocs.io/en/stable/v2-migration-guide.html"
+            )
+
+    sys.modules[__name__].__class__ = RequestModule
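
This vendored urllib3 1.26.17 change makes the urllib3.request module object itself raise a descriptive error when called, so a urllib3 v2-style call points at the migration guide instead of failing with a bare "'module' object is not callable". Roughly, against upstream urllib3 1.26.17:

import urllib3

try:
    # urllib3.request(...) is a v2 API; on 1.26.x this calls the module object.
    urllib3.request("GET", "https://example.com")
except TypeError as exc:
    print(exc)  # suggests upgrading to urllib3 v2 and links the migration guide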

View File

@@ -235,7 +235,7 @@ class Retry(object):
     RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503])
 
     #: Default headers to be used for ``remove_headers_on_redirect``
-    DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Authorization"])
+    DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Cookie", "Authorization"])
 
     #: Maximum backoff time.
     DEFAULT_BACKOFF_MAX = 120
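
Also part of the urllib3 1.26.17 bump: Cookie joins Authorization in the headers stripped by default when a redirect moves to a different host (the cookie-leak fix shipped in that release). A quick check, assuming urllib3 1.26.17 or newer is installed:

from urllib3.util.retry import Retry

retry = Retry()  # picks up DEFAULT_REMOVE_HEADERS_ON_REDIRECT
print(retry.remove_headers_on_redirect)
# frozenset({'cookie', 'authorization'}) -- header names are stored lowercased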

View File

@@ -11,7 +11,7 @@ requests==2.31.0
     certifi==2023.7.22
     chardet==5.1.0
     idna==3.4
-    urllib3==1.26.16
+    urllib3==1.26.17
 rich==13.4.2
     pygments==2.15.1
     typing_extensions==4.7.1

View File

@@ -14,6 +14,7 @@ from hashlib import sha256
 from pathlib import Path
 from textwrap import dedent
 from typing import (
+    TYPE_CHECKING,
     Any,
     AnyStr,
     Callable,
@@ -58,6 +59,9 @@ from tests.lib import (
 from tests.lib.server import MockServer, make_mock_server
 from tests.lib.venv import VirtualEnvironment, VirtualEnvironmentType
 
+if TYPE_CHECKING:
+    from pip._vendor.typing_extensions import Self
+
 
 def pytest_addoption(parser: Parser) -> None:
     parser.addoption(
@@ -141,7 +145,7 @@ def pytest_collection_modifyitems(config: Config, items: List[pytest.Function])
         if "script" in item.fixturenames:
             raise RuntimeError(
                 "Cannot use the ``script`` funcarg in a unit test: "
-                "(filename = {}, item = {})".format(module_path, item)
+                f"(filename = {module_path}, item = {item})"
             )
     else:
         raise RuntimeError(f"Unknown test type (filename = {module_path})")
@@ -941,7 +945,7 @@ def html_index_with_onetime_server(
     """
 
     class InDirectoryServer(http.server.ThreadingHTTPServer):
-        def finish_request(self, request: Any, client_address: Any) -> None:
+        def finish_request(self: "Self", request: Any, client_address: Any) -> None:
             self.RequestHandlerClass(
                 request,
                 client_address,
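
The Self import above is guarded by TYPE_CHECKING, so it exists only for mypy, and the annotation is written as a string so nothing is evaluated at runtime. A minimal sketch of the same pattern outside pip's test suite (using the plain typing_extensions distribution rather than pip's vendored copy):

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from typing_extensions import Self


class Builder:
    def set_name(self: "Self", name: str) -> "Self":
        self.name = name
        return self


Builder().set_name("example")  # runs fine; Self is never imported at runtime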

View File

@@ -23,7 +23,7 @@ def test_entrypoints_work(entrypoint: str, script: PipTestEnvironment) -> None:
     fake_pkg.mkdir()
     fake_pkg.joinpath("setup.py").write_text(
         dedent(
-            """
+            f"""
             from setuptools import setup
 
             setup(
@@ -31,13 +31,11 @@ def test_entrypoints_work(entrypoint: str, script: PipTestEnvironment) -> None:
                 version="0.1.0",
                 entry_points={{
                     "console_scripts": [
-                        {!r}
+                        {entrypoint!r}
                     ]
                 }}
             )
-            """.format(
-                entrypoint
-            )
+            """
         )
     )
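
One detail worth noting in conversions like this one: literal braces inside an f-string (or a .format() template) must be doubled, so the {{ and }} around the entry_points dict stay as they are while {entrypoint!r} becomes an interpolation. A standalone sketch with a made-up entry point:

entrypoint = "fake-cmd = fake_pkg:main"

snippet = f"""
setup(
    entry_points={{
        "console_scripts": [
            {entrypoint!r}
        ]
    }}
)
"""
print(snippet)  # the doubled braces render as single braces; !r adds the quotes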

View File

@@ -400,7 +400,7 @@ def test_completion_path_after_option(
 def test_completion_uses_same_executable_name(
     autocomplete_script: PipTestEnvironment, flag: str, deprecated_python: bool
 ) -> None:
-    executable_name = "pip{}".format(sys.version_info[0])
+    executable_name = f"pip{sys.version_info[0]}"
     # Deprecated python versions produce an extra deprecation warning
     result = autocomplete_script.run(
         executable_name,

View File

@@ -68,7 +68,7 @@ def test_debug__tags(script: PipTestEnvironment, args: List[str]) -> None:
     stdout = result.stdout
 
     tags = compatibility_tags.get_supported()
-    expected_tag_header = "Compatible tags: {}".format(len(tags))
+    expected_tag_header = f"Compatible tags: {len(tags)}"
     assert expected_tag_header in stdout
 
     show_verbose_note = "--verbose" not in args

View File

@@ -166,13 +166,11 @@ def test_freeze_with_invalid_names(script: PipTestEnvironment) -> None:
         with open(egg_info_path, "w") as egg_info_file:
             egg_info_file.write(
                 textwrap.dedent(
-                    """\
+                    f"""\
                     Metadata-Version: 1.0
-                    Name: {}
+                    Name: {pkgname}
                     Version: 1.0
-                    """.format(
-                        pkgname
-                    )
+                    """
                 )
             )
@@ -221,12 +219,10 @@ def test_freeze_editable_not_vcs(script: PipTestEnvironment) -> None:
     # We need to apply os.path.normcase() to the path since that is what
     # the freeze code does.
     expected = textwrap.dedent(
-        """\
+        f"""\
         ...# Editable install with no version control (version-pkg==0.1)
-        -e {}
-        ...""".format(
-            os.path.normcase(pkg_path)
-        )
+        -e {os.path.normcase(pkg_path)}
+        ..."""
     )
     _check_output(result.stdout, expected)
@@ -248,12 +244,10 @@ def test_freeze_editable_git_with_no_remote(
     # We need to apply os.path.normcase() to the path since that is what
     # the freeze code does.
    expected = textwrap.dedent(
-        """\
+        f"""\
         ...# Editable Git install with no remote (version-pkg==0.1)
-        -e {}
-        ...""".format(
-            os.path.normcase(pkg_path)
-        )
+        -e {os.path.normcase(pkg_path)}
+        ..."""
     )
     _check_output(result.stdout, expected)
@@ -653,9 +647,9 @@ def test_freeze_with_requirement_option_file_url_egg_not_installed(
         expect_stderr=True,
     )
     expected_err = (
-        "WARNING: Requirement file [requirements.txt] contains {}, "
+        f"WARNING: Requirement file [requirements.txt] contains {url}, "
        "but package 'Does.Not-Exist' is not installed\n"
-    ).format(url)
+    )
    if deprecated_python:
         assert expected_err in result.stderr
     else:

View File

@@ -106,10 +106,10 @@ def test_pep518_refuses_conflicting_requires(
     assert (
         result.returncode != 0
         and (
-            "Some build dependencies for {url} conflict "
+            f"Some build dependencies for {project_dir.as_uri()} conflict "
             "with PEP 517/518 supported "
             "requirements: setuptools==1.0 is incompatible with "
-            "setuptools>=40.8.0.".format(url=project_dir.as_uri())
+            "setuptools>=40.8.0."
         )
         in result.stderr
     ), str(result)
@@ -595,8 +595,8 @@ def test_hashed_install_success(
     with requirements_file(
         "simple2==1.0 --hash=sha256:9336af72ca661e6336eb87bc7de3e8844d853e"
         "3848c2b9bbd2e8bf01db88c2c7\n"
-        "{simple} --hash=sha256:393043e672415891885c9a2a0929b1af95fb866d6c"
-        "a016b42d2e6ce53619b653".format(simple=file_url),
+        f"{file_url} --hash=sha256:393043e672415891885c9a2a0929b1af95fb866d6c"
+        "a016b42d2e6ce53619b653",
         tmpdir,
     ) as reqs_file:
         script.pip_install_local("-r", reqs_file.resolve())
@@ -1735,7 +1735,7 @@ def test_install_builds_wheels(script: PipTestEnvironment, data: TestData) -> No
     # into the cache
     assert wheels != [], str(res)
     assert wheels == [
-        "Upper-2.0-py{}-none-any.whl".format(sys.version_info[0]),
+        f"Upper-2.0-py{sys.version_info[0]}-none-any.whl",
     ]
@@ -2387,7 +2387,7 @@ def test_install_verify_package_name_normalization(
     assert "Successfully installed simple-package" in result.stdout
 
     result = script.pip("install", package_name)
-    assert "Requirement already satisfied: {}".format(package_name) in result.stdout
+    assert f"Requirement already satisfied: {package_name}" in result.stdout
 
 
 def test_install_logs_pip_version_in_debug(

View File

@@ -184,12 +184,10 @@ def test_config_file_override_stack(
     config_file.write_text(
         textwrap.dedent(
-            """\
+            f"""\
             [global]
-            index-url = {}/simple1
-            """.format(
-                base_address
-            )
+            index-url = {base_address}/simple1
+            """
         )
     )
     script.pip("install", "-vvv", "INITools", expect_error=True)
@@ -197,14 +195,12 @@ def test_config_file_override_stack(
     config_file.write_text(
         textwrap.dedent(
-            """\
+            f"""\
             [global]
-            index-url = {address}/simple1
+            index-url = {base_address}/simple1
             [install]
-            index-url = {address}/simple2
-            """.format(
-                address=base_address
-            )
+            index-url = {base_address}/simple2
+            """
         )
     )
     script.pip("install", "-vvv", "INITools", expect_error=True)

View File

@@ -41,13 +41,11 @@ def test_find_links_requirements_file_relative_path(
     """Test find-links as a relative path to a reqs file."""
     script.scratch_path.joinpath("test-req.txt").write_text(
         textwrap.dedent(
-            """
+            f"""
             --no-index
-            --find-links={}
+            --find-links={data.packages.as_posix()}
             parent==0.1
-            """.format(
-                data.packages.as_posix()
-            )
+            """
         )
     )
     result = script.pip(

Some files were not shown because too many files have changed in this diff.