Merge branch 'main' into main

Commit a69e7c2573 by Darshan, 2023-11-23 20:04:57 +05:30, committed via GitHub.
276 changed files with 9014 additions and 3770 deletions

.git-blame-ignore-revs (new file, 36 lines)

@ -0,0 +1,36 @@
917b41d6d73535c090fc312668dff353cdaef906 # Blacken docs/html/conf.py
ed383dd8afa8fe0250dcf9b8962927ada0e21c89 # Blacken docs/pip_sphinxext.py
228405e62451abe8a66233573035007df4be575f # Blacken noxfile.py
f477a9f490e978177b71c9dbaa5465c51ea21129 # Blacken setup.py
e59ba23468390217479465019f8d78e724a23550 # Blacken src/pip/__main__.py
d7013db084e9a52242354ee5754dc5d19ccf062e # Blacken src/pip/_internal/build_env.py
30e9ffacae75378fc3e3df48f754dabad037edb9 # Blacken src/pip/_internal/cache.py
8341d56b46776a805286218ac5fb0e7850fd9341 # Blacken src/pip/_internal/cli/autocompletion.py
3d3461ed65208656358b3595e25d8c31c5c89470 # Blacken src/pip/_internal/cli/base_command.py
d489b0f1b104bc936b0fb17e6c33633664ebdc0e # Blacken src/pip/_internal/cli/cmdoptions.py
591fe4841aefe9befa0530f2a54f820c4ecbb392 # Blacken src/pip/_internal/cli/command_context.py
9265b28ef7248ae1847a80384dbeeb8119c3e2f5 # Blacken src/pip/_internal/cli/main.py
847a369364878c38d210c90beed2737bb6fb3a85 # Blacken src/pip/_internal/cli/main_parser.py
ec97119067041ae58b963935ff5f0e5d9fead80c # Blacken src/pip/_internal/cli/parser.py
6e3b8de22fa39fa3073599ecf9db61367f4b3b32 # Blacken src/pip/_internal/cli/progress_bars.py
55405227de983c5bd5bf0858ea12dbe537d3e490 # Blacken src/pip/_internal/cli/req_command.py
d5ca5c850cae9a0c64882a8f49d3a318699a7e2e # Blacken src/pip/_internal/cli/spinners.py
9747cb48f8430a7a91b36fe697dd18dbddb319f0 # Blacken src/pip/_internal/commands/__init__.py
1c09fd6f124df08ca36bed68085ad68e89bb1957 # Blacken src/pip/_internal/commands/cache.py
315e93d7eb87cd476afcc4eaf0f01a7b56a5037f # Blacken src/pip/_internal/commands/check.py
8ae3b96ed7d24fd24024ccce4840da0dcf635f26 # Blacken src/pip/_internal/commands/completion.py
42ca4792202f26a293ee48380718743a80bbee37 # Blacken src/pip/_internal/commands/configuration.py
790ad78fcd43d41a5bef9dca34a3c128d05eb02c # Blacken src/pip/_internal/commands/debug.py
a6fcc8f045afe257ce321f4012fc8fcb4be01eb3 # Blacken src/pip/_internal/commands/download.py
920e735dfc60109351fbe2f4c483c2f6ede9e52d # Blacken src/pip/_internal/commands/freeze.py
053004e0fcf0851238b1064fbce13aea87b24e9c # Blacken src/pip/_internal/commands/hash.py
a6b6ae487e52c2242045b64cb8962e0a992cfd76 # Blacken src/pip/_internal/commands/help.py
2495cf95a6c7eb61ccf1f9f0e8b8d736af914e53 # Blacken __main__.py
c7ee560e00b85f7486b452c14ff49e4737996eda # Blacken tools/
8e2e1964a4f0a060f7299a96a911c9e116b2283d # Blacken src/pip/_internal/commands/
1bc0eef05679e87f45540ab0a294667cb3c6a88e # Blacken src/pip/_internal/network/
069b01932a7d64a81c708c6254cc93e1f89e6783 # Blacken src/pip/_internal/req
1897784d59e0d5fcda2dd75fea54ddd8be3d502a # Blacken src/pip/_internal/index
94999255d5ede440c37137d210666fdf64302e75 # Reformat the codebase, with black
585037a80a1177f1fa92e159a7079855782e543e # Cleanup implicit string concatenation
8a6f6ac19b80a6dc35900a47016c851d9fcd2ee2 # Blacken src/pip/_internal/resolution directory
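
Note that this file has no effect until git is told to consult it; below is a hedged sketch of the standard opt-in (`blame.ignoreRevsFile` is a stock git config key, wrapped in Python here only to match the rest of this codebase):

```python
import subprocess

# Register the ignore list once per clone, so the bulk-reformatting
# commits listed above stop showing up in `git blame` output.
subprocess.run(
    ["git", "config", "blame.ignoreRevsFile", ".git-blame-ignore-revs"],
    check=True,
)
```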

.github/dependabot.yml (new file, 6 lines)

@ -0,0 +1,6 @@
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "monthly"


@ -21,7 +21,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: "3.x"
@ -57,7 +57,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: "3.x"
@ -81,7 +81,7 @@ jobs:
github.event_name != 'pull_request'
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: "3.x"
@ -91,7 +91,7 @@ jobs:
- run: git diff --exit-code
tests-unix:
name: tests / ${{ matrix.python }} / ${{ matrix.os }}
name: tests / ${{ matrix.python.key || matrix.python }} / ${{ matrix.os }}
runs-on: ${{ matrix.os }}-latest
needs: [packaging, determine-changes]
@ -109,12 +109,14 @@ jobs:
- "3.9"
- "3.10"
- "3.11"
- "3.12"
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python }}
allow-prereleases: true
- name: Install Ubuntu dependencies
if: matrix.os == 'Ubuntu'
@ -129,12 +131,12 @@ jobs:
# Main check
- name: Run unit tests
run: >-
nox -s test-${{ matrix.python }} --
nox -s test-${{ matrix.python.key || matrix.python }} --
-m unit
--verbose --numprocesses auto --showlocals
- name: Run integration tests
run: >-
nox -s test-${{ matrix.python }} --
nox -s test-${{ matrix.python.key || matrix.python }} --
-m integration
--verbose --numprocesses auto --showlocals
--durations=5
@ -162,29 +164,18 @@ jobs:
group: [1, 2]
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python }}
# We use a RAMDisk on Windows, since filesystem IO is a big slowdown
# for our tests.
- name: Create a RAMDisk
run: ./tools/ci/New-RAMDisk.ps1 -Drive R -Size 1GB
- name: Setup RAMDisk permissions
run: |
mkdir R:\Temp
$acl = Get-Acl "R:\Temp"
$rule = New-Object System.Security.AccessControl.FileSystemAccessRule(
"Everyone", "FullControl", "ContainerInherit,ObjectInherit", "None", "Allow"
)
$acl.AddAccessRule($rule)
Set-Acl "R:\Temp" $acl
# We use C:\Temp (which is already available on the worker)
# as a temporary directory for all of the tests because the
# default value (under the user dir) is more deeply nested
# and causes tests to fail with "path too long" errors.
- run: pip install nox
env:
TEMP: "R:\\Temp"
TEMP: "C:\\Temp"
# Main check
- name: Run unit tests
@ -194,7 +185,7 @@ jobs:
-m unit
--verbose --numprocesses auto --showlocals
env:
TEMP: "R:\\Temp"
TEMP: "C:\\Temp"
- name: Run integration tests (group 1)
if: matrix.group == 1
@ -203,7 +194,7 @@ jobs:
-m integration -k "not test_install"
--verbose --numprocesses auto --showlocals
env:
TEMP: "R:\\Temp"
TEMP: "C:\\Temp"
- name: Run integration tests (group 2)
if: matrix.group == 2
@ -212,7 +203,7 @@ jobs:
-m integration -k "test_install"
--verbose --numprocesses auto --showlocals
env:
TEMP: "R:\\Temp"
TEMP: "C:\\Temp"
tests-zipapp:
name: tests / zipapp
@ -224,7 +215,7 @@ jobs:
github.event_name != 'pull_request'
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: "3.10"


@ -17,7 +17,7 @@ jobs:
if: github.repository_owner == 'pypa'
runs-on: ubuntu-latest
steps:
- uses: dessant/lock-threads@v3
- uses: dessant/lock-threads@v4
with:
issue-inactive-days: '30'
pr-inactive-days: '15'


@ -10,7 +10,7 @@ jobs:
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
# `towncrier check` runs `git diff --name-only origin/main...`, which
# needs a non-shallow clone.


@ -1,19 +0,0 @@
name: No Response
# Both `issue_comment` and `scheduled` event types are required for this Action
# to work properly.
on:
issue_comment:
types: [created]
schedule:
# Schedule for five minutes after the hour, every hour
- cron: '5 * * * *'
jobs:
noResponse:
runs-on: ubuntu-latest
steps:
- uses: lee-dohm/no-response@v0.5.0
with:
token: ${{ github.token }}
responseRequiredLabel: "S: awaiting response"


@ -18,7 +18,7 @@ jobs:
runs-on: ubuntu-latest
environment: RTD Deploys
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: "3.11"


@ -17,42 +17,31 @@ repos:
exclude: .patch
- repo: https://github.com/psf/black
rev: 23.1.0
rev: 23.7.0
hooks:
- id: black
- repo: https://github.com/PyCQA/flake8
rev: 6.0.0
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.1.4
hooks:
- id: flake8
additional_dependencies: [
'flake8-bugbear',
'flake8-logging-format',
'flake8-implicit-str-concat',
]
exclude: tests/data
- repo: https://github.com/PyCQA/isort
rev: 5.12.0
hooks:
- id: isort
files: \.py$
- id: ruff
args: [--fix, --exit-non-zero-on-fix]
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v0.961
rev: v1.6.1
hooks:
- id: mypy
exclude: tests/data
args: ["--pretty", "--show-error-codes"]
additional_dependencies: [
'keyring==23.0.1',
'nox==2021.6.12',
'keyring==24.2.0',
'nox==2023.4.22',
'pytest',
'types-docutils==0.18.3',
'types-setuptools==57.4.14',
'types-freezegun==1.1.9',
'types-six==1.16.15',
'types-pyyaml==6.0.12.2',
'types-docutils==0.20.0.3',
'types-setuptools==68.2.0.0',
'types-freezegun==1.1.10',
'types-six==1.16.21.9',
'types-pyyaml==6.0.12.12',
]
- repo: https://github.com/pre-commit/pygrep-hooks


@ -1,10 +1,14 @@
version: 2
build:
os: ubuntu-22.04
tools:
python: "3.11"
sphinx:
builder: htmldir
builder: dirhtml
configuration: docs/html/conf.py
python:
version: 3.8
install:
- requirements: docs/requirements.txt


@ -20,6 +20,7 @@ Albert-Guan
albertg
Alberto Sottile
Aleks Bunin
Ales Erjavec
Alethea Flowers
Alex Gaynor
Alex Grönholm
@ -30,6 +31,7 @@ Alex Stachowiak
Alexander Shtyrov
Alexandre Conrad
Alexey Popravka
Aleš Erjavec
Alli
Ami Fischman
Ananya Maiti
@ -71,6 +73,7 @@ atse
Atsushi Odagiri
Avinash Karhana
Avner Cohen
Awit (Ah-Wit) Ghirmai
Baptiste Mispelon
Barney Gale
barneygale
@ -126,6 +129,7 @@ Chih-Hsuan Yen
Chris Brinker
Chris Hunt
Chris Jerdonek
Chris Kuehl
Chris McDonough
Chris Pawley
Chris Pryer
@ -194,9 +198,11 @@ David Runge
David Tucker
David Wales
Davidovich
ddelange
Deepak Sharma
Deepyaman Datta
Denise Yu
dependabot[bot]
derwolfe
Desetude
Devesh Kumar Singh
@ -310,6 +316,7 @@ Ilya Baryshev
Inada Naoki
Ionel Cristian Mărieș
Ionel Maries Cristian
Itamar Turner-Trauring
Ivan Pozdeev
Jacob Kim
Jacob Walls
@ -330,10 +337,13 @@ Jarek Potiuk
jarondl
Jason Curtis
Jason R. Coombs
JasonMo
JasonMo1
Jay Graves
Jean-Christophe Fillion-Robin
Jeff Barber
Jeff Dairiki
Jeff Widman
Jelmer Vernooij
jenix21
Jeremy Stanley
@ -344,6 +354,7 @@ Jim Fisher
Jim Garrison
Jiun Bae
Jivan Amara
Joe Bylund
Joe Michelini
John Paton
John T. Wodder II
@ -362,6 +373,7 @@ Joseph Long
Josh Bronson
Josh Hansen
Josh Schneier
Joshua
Juan Luis Cano Rodríguez
Juanjo Bazán
Judah Rand
@ -392,6 +404,7 @@ KOLANICH
kpinc
Krishna Oza
Kumar McMillan
Kurt McKee
Kyle Persohn
lakshmanaram
Laszlo Kiss-Kollar
@ -408,6 +421,7 @@ lorddavidiii
Loren Carvalho
Lucas Cimon
Ludovic Gasc
Lukas Geiger
Lukas Juhrich
Luke Macken
Luo Jiebin
@ -441,6 +455,7 @@ Matthew Einhorn
Matthew Feickert
Matthew Gilliard
Matthew Iversen
Matthew Treinish
Matthew Trumbell
Matthew Willson
Matthias Bussonnier
@ -523,6 +538,7 @@ Patrick Jenkins
Patrick Lawson
patricktokeeffe
Patrik Kopkan
Paul Ganssle
Paul Kehrer
Paul Moore
Paul Nasrat
@ -582,6 +598,7 @@ Rishi
RobberPhex
Robert Collins
Robert McGibbon
Robert Pollak
Robert T. McGibbon
robin elisha robinson
Roey Berman
@ -602,6 +619,7 @@ ryneeverett
Sachi King
Salvatore Rinchiera
sandeepkiran-js
Sander Van Balen
Savio Jomton
schlamar
Scott Kitterman
@ -614,6 +632,8 @@ SeongSoo Cho
Sergey Vasilyev
Seth Michael Larson
Seth Woodworth
Shahar Epstein
Shantanu
shireenrao
Shivansh-007
Shlomi Fish
@ -638,7 +658,9 @@ Steve Barnes
Steve Dower
Steve Kowalik
Steven Myint
Steven Silvester
stonebig
studioj
Stéphane Bidoul
Stéphane Bidoul (ACSONE)
Stéphane Klein
@ -707,6 +729,7 @@ Wilson Mo
wim glenn
Winson Luk
Wolfgang Maier
Wu Zhenyu
XAMES3
Xavier Fernandez
xoviat


@ -14,6 +14,7 @@ recursive-include src/pip/_vendor *COPYING*
include docs/docutils.conf
include docs/requirements.txt
exclude .git-blame-ignore-revs
exclude .coveragerc
exclude .mailmap
exclude .appveyor.yml

NEWS.rst (136 lines changed)

@ -9,6 +9,136 @@
.. towncrier release notes start
23.3.1 (2023-10-21)
===================
Bug Fixes
---------
- Handle a timezone indicator of Z when parsing dates in the self check. (`#12338 <https://github.com/pypa/pip/issues/12338>`_)
- Fix bug where installing the same package at the same time with multiple pip processes could fail. (`#12361 <https://github.com/pypa/pip/issues/12361>`_)
23.3 (2023-10-15)
=================
Process
-------
- Added reference to `vulnerability reporting guidelines <https://www.python.org/dev/security/>`_ to pip's security policy.
Deprecations and Removals
-------------------------
- Drop a fallback to using SecureTransport on macOS. It was useful when pip detected OpenSSL older than 1.0.1, but the current pip does not support any Python version supporting such old OpenSSL versions. (`#12175 <https://github.com/pypa/pip/issues/12175>`_)
Features
--------
- Improve extras resolution for multiple constraints on same base package. (`#11924 <https://github.com/pypa/pip/issues/11924>`_)
- Improve use of datastructures to make candidate selection 1.6x faster. (`#12204 <https://github.com/pypa/pip/issues/12204>`_)
- Allow ``pip install --dry-run`` to use platform and ABI overriding options. (`#12215 <https://github.com/pypa/pip/issues/12215>`_)
- Add ``is_yanked`` boolean entry to the installation report (``--report``) to indicate whether the requirement was yanked from the index but was still selected by pip, in conformance with :pep:`592`. (`#12224 <https://github.com/pypa/pip/issues/12224>`_)
Bug Fixes
---------
- Ignore errors in temporary directory cleanup (show a warning instead). (`#11394 <https://github.com/pypa/pip/issues/11394>`_)
- Normalize extras according to :pep:`685` from package metadata in the resolver
for comparison. This ensures extras are correctly compared and merged as long
as the package providing the extra(s) is built with values normalized according
to the standard. Note, however, that this *does not* solve cases where the
package itself contains unnormalized extra values in the metadata. (`#11649 <https://github.com/pypa/pip/issues/11649>`_)
- Prevent downloading sdists twice when :pep:`658` metadata is present. (`#11847 <https://github.com/pypa/pip/issues/11847>`_)
- Include all requested extras in the install report (``--report``). (`#11924 <https://github.com/pypa/pip/issues/11924>`_)
- Removed uses of ``datetime.datetime.utcnow`` from non-vendored code. (`#12005 <https://github.com/pypa/pip/issues/12005>`_)
- Consistently report whether a dependency comes from an extra. (`#12095 <https://github.com/pypa/pip/issues/12095>`_)
- Fix completion script for zsh (`#12166 <https://github.com/pypa/pip/issues/12166>`_)
- Fix improper handling of the new onexc argument of ``shutil.rmtree()`` in Python 3.12. (`#12187 <https://github.com/pypa/pip/issues/12187>`_)
- Filter out yanked links from the available versions error message: "(from versions: 1.0, 2.0, 3.0)" will not contain yanked versions, in conformance with PEP 592. The yanked versions (if any) will be mentioned in a separate error message. (`#12225 <https://github.com/pypa/pip/issues/12225>`_)
- Fix crash when the git version number contains something else than digits and dots. (`#12280 <https://github.com/pypa/pip/issues/12280>`_)
- Use ``-r=...`` instead of ``-r ...`` to specify references with Mercurial. (`#12306 <https://github.com/pypa/pip/issues/12306>`_)
- Redact password from URLs in some additional places. (`#12350 <https://github.com/pypa/pip/issues/12350>`_)
- pip uses less memory when caching large packages. As a result, there is a new on-disk cache format stored in a new directory ($PIP_CACHE_DIR/http-v2). (`#2984 <https://github.com/pypa/pip/issues/2984>`_)
Vendored Libraries
------------------
- Upgrade certifi to 2023.7.22
- Add truststore 0.8.0
- Upgrade urllib3 to 1.26.17
Improved Documentation
----------------------
- Document that ``pip search`` support has been removed from PyPI (`#12059 <https://github.com/pypa/pip/issues/12059>`_)
- Clarify --prefer-binary in CLI and docs (`#12122 <https://github.com/pypa/pip/issues/12122>`_)
- Document that using OS-provided Python can cause pip's test suite to report false failures. (`#12334 <https://github.com/pypa/pip/issues/12334>`_)
23.2.1 (2023-07-22)
===================
Bug Fixes
---------
- Disable :pep:`658` metadata fetching with the legacy resolver. (`#12156 <https://github.com/pypa/pip/issues/12156>`_)
23.2 (2023-07-15)
=================
Process
-------
- Deprecate support for eggs for Python 3.11 or later, when the new ``importlib.metadata`` backend is used to load distribution metadata. This only affects the egg *distribution format* (with the ``.egg`` extension); distributions using the ``.egg-info`` *metadata format* (but are not actually eggs) are not affected. For more information about eggs, see `relevant section in the setuptools documentation <https://setuptools.pypa.io/en/stable/deprecated/python_eggs.html>`__.
Deprecations and Removals
-------------------------
- Deprecate legacy version and version specifiers that don't conform to `PEP 440
<https://peps.python.org/pep-0440/>`_ (`#12063 <https://github.com/pypa/pip/issues/12063>`_)
- ``freeze`` no longer excludes ``setuptools``, ``distribute``, and ``wheel``
from the output when running on Python 3.12 or later, where they are not
included in a virtual environment by default. Use ``--exclude`` if you wish to
exclude any of these packages. (`#4256 <https://github.com/pypa/pip/issues/4256>`_)
Features
--------
- Make rejection messages slightly different between 1 and 8 attempts, so the user can tell the difference. (`#12040 <https://github.com/pypa/pip/issues/12040>`_)
Bug Fixes
---------
- Fix ``pip completion --zsh``. (`#11417 <https://github.com/pypa/pip/issues/11417>`_)
- Prevent downloading files twice when :pep:`658` metadata is present (`#11847 <https://github.com/pypa/pip/issues/11847>`_)
- Add permission check before configuration (`#11920 <https://github.com/pypa/pip/issues/11920>`_)
- Fix deprecation warnings in Python 3.12 for usage of shutil.rmtree (`#11957 <https://github.com/pypa/pip/issues/11957>`_)
- Ignore invalid or unreadable ``origin.json`` files in the cache of locally built wheels. (`#11985 <https://github.com/pypa/pip/issues/11985>`_)
- Fix installation of packages with :pep:`658` metadata using non-canonicalized names (`#12038 <https://github.com/pypa/pip/issues/12038>`_)
- Correctly parse ``dist-info-metadata`` values from JSON-format index data. (`#12042 <https://github.com/pypa/pip/issues/12042>`_)
- Fail with an error if the ``--python`` option is specified after the subcommand name. (`#12067 <https://github.com/pypa/pip/issues/12067>`_)
- Fix slowness when using ``importlib.metadata`` (the default way for pip to read metadata in Python 3.11+) and there is a large overlap between already installed and to-be-installed packages. (`#12079 <https://github.com/pypa/pip/issues/12079>`_)
- Pass the ``-r`` flag to mercurial to be explicit that a revision is passed and protect
against ``hg`` options injection as part of VCS URLs. Users that do not have control on
VCS URLs passed to pip are advised to upgrade. (`#12119 <https://github.com/pypa/pip/issues/12119>`_)
Vendored Libraries
------------------
- Upgrade certifi to 2023.5.7
- Upgrade platformdirs to 3.8.1
- Upgrade pygments to 2.15.1
- Upgrade pyparsing to 3.1.0
- Upgrade Requests to 2.31.0
- Upgrade rich to 13.4.2
- Upgrade setuptools to 68.0.0
- Updated typing_extensions to 4.6.0
- Upgrade typing_extensions to 4.7.1
- Upgrade urllib3 to 1.26.16
23.1.2 (2023-04-26)
===================
@ -53,7 +183,7 @@ Deprecations and Removals
``--config-settings``. (`#11859 <https://github.com/pypa/pip/issues/11859>`_)
- Using ``--config-settings`` with projects that don't have a ``pyproject.toml`` now prints
a deprecation warning. In the future the presence of config settings will automatically
enable the default build backend for legacy projects and pass the setttings to it. (`#11915 <https://github.com/pypa/pip/issues/11915>`_)
enable the default build backend for legacy projects and pass the settings to it. (`#11915 <https://github.com/pypa/pip/issues/11915>`_)
- Remove ``setup.py install`` fallback when building a wheel failed for projects without
``pyproject.toml``. (`#8368 <https://github.com/pypa/pip/issues/8368>`_)
- When the ``wheel`` package is not installed, pip now uses the default build backend
@ -157,7 +287,7 @@ Features
- Change the hashes in the installation report to be a mapping. Emit the
``archive_info.hashes`` dictionary in ``direct_url.json``. (`#11312 <https://github.com/pypa/pip/issues/11312>`_)
- Implement logic to read the ``EXTERNALLY-MANAGED`` file as specified in PEP 668.
- Implement logic to read the ``EXTERNALLY-MANAGED`` file as specified in :pep:`668`.
This allows a downstream Python distributor to prevent users from using pip to
modify the externally managed environment. (`#11381 <https://github.com/pypa/pip/issues/11381>`_)
- Enable the use of ``keyring`` found on ``PATH``. This allows ``keyring``
@ -173,7 +303,7 @@ Bug Fixes
- Use the "venv" scheme if available to obtain prefixed lib paths. (`#11598 <https://github.com/pypa/pip/issues/11598>`_)
- Deprecated a historical ambiguity in how ``egg`` fragments in URL-style
requirements are formatted and handled. ``egg`` fragments that do not look
like PEP 508 names now produce a deprecation warning. (`#11617 <https://github.com/pypa/pip/issues/11617>`_)
like :pep:`508` names now produce a deprecation warning. (`#11617 <https://github.com/pypa/pip/issues/11617>`_)
- Fix scripts path in isolated build environment on Debian. (`#11623 <https://github.com/pypa/pip/issues/11623>`_)
- Make ``pip show`` show the editable location if package is editable (`#11638 <https://github.com/pypa/pip/issues/11638>`_)
- Stop checking that ``wheel`` is present when ``build-system.requires``


@ -3,9 +3,15 @@ pip - The Python Package Installer
.. image:: https://img.shields.io/pypi/v/pip.svg
:target: https://pypi.org/project/pip/
:alt: PyPI
.. image:: https://img.shields.io/pypi/pyversions/pip
:target: https://pypi.org/project/pip
:alt: PyPI - Python Version
.. image:: https://readthedocs.org/projects/pip/badge/?version=latest
:target: https://pip.pypa.io/en/latest
:alt: Documentation
pip is the `package installer`_ for Python. You can use pip to install packages from the `Python Package Index`_ and other indexes.
@ -19,10 +25,6 @@ We release updates regularly, with a new version every 3 months. Find more details
* `Release notes`_
* `Release process`_
In pip 20.3, we've `made a big improvement to the heart of pip`_; `learn more`_. We want your input, so `sign up for our user experience research studies`_ to help us do it right.
**Note**: pip 21.0, in January 2021, removed Python 2 support, per pip's `Python 2 support policy`_. Please migrate to Python 3.
If you find bugs, need help, or want to talk to the developers, please use our mailing lists or chat rooms:
* `Issue tracking`_
@ -49,10 +51,6 @@ rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_.
.. _Release process: https://pip.pypa.io/en/latest/development/release-process/
.. _GitHub page: https://github.com/pypa/pip
.. _Development documentation: https://pip.pypa.io/en/latest/development
.. _made a big improvement to the heart of pip: https://pyfound.blogspot.com/2020/11/pip-20-3-new-resolver.html
.. _learn more: https://pip.pypa.io/en/latest/user_guide/#changes-to-the-pip-dependency-resolver-in-20-3-2020
.. _sign up for our user experience research studies: https://pyfound.blogspot.com/2020/03/new-pip-resolver-to-roll-out-this-year.html
.. _Python 2 support policy: https://pip.pypa.io/en/latest/development/release-process/#python-2-support
.. _Issue tracking: https://github.com/pypa/pip/issues
.. _Discourse channel: https://discuss.python.org/c/packaging
.. _User IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa


@ -1,3 +1,10 @@
# Security and Vulnerability Reporting
# Security Policy
If you find any security issues, please report to [security@python.org](mailto:security@python.org)
## Reporting a Vulnerability
Please read the guidelines on reporting security issues [on the
official website](https://www.python.org/dev/security/) for
instructions on how to report a security-related problem to
the Python Security Response Team responsibly.
To reach the response team, email `security at python dot org`.


@ -21,6 +21,12 @@ Usage
Description
===========
.. attention::
PyPI no longer supports ``pip search`` (or XML-RPC search). Please use https://pypi.org/search (via a browser)
instead. See https://warehouse.pypa.io/api-reference/xml-rpc.html#deprecated-methods for more information.
However, XML-RPC search (and this command) may still be supported by indexes other than PyPI.
.. pip-command-description:: search


@ -112,7 +112,7 @@ the ``news/`` directory with the extension of ``.trivial.rst``. If you are on a
POSIX like operating system, one can be added by running
``touch news/$(uuidgen).trivial.rst``. On Windows, the same result can be
achieved in Powershell using ``New-Item "news/$([guid]::NewGuid()).trivial.rst"``.
Core committers may also add a "trivial" label to the PR which will accomplish
Core committers may also add a "skip news" label to the PR which will accomplish
the same thing.
Upgrading, removing, or adding a new vendored library gets a special mention


@ -73,7 +73,7 @@ pip's tests are written using the :pypi:`pytest` test framework and
:mod:`unittest.mock`. :pypi:`nox` is used to automate the setup and execution
of pip's tests.
It is preferable to run the tests in parallel for better experience during development,
It is preferable to run the tests in parallel for a better experience during development,
since the tests can take a long time to finish when run sequentially.
To run tests:
@ -104,6 +104,15 @@ can select tests using the various ways that pytest provides:
$ # Using keywords
$ nox -s test-3.10 -- -k "install and not wheel"
.. note::
When running pip's tests with OS distribution Python versions, be aware that some
functional tests may fail due to potential patches introduced by the distribution.
For all tests to pass, consider:
- Installing Python from `python.org`_ or compiling it from source
- Using `pyenv`_ to assist with source compilation
Running pip's entire test suite requires supported version control tools
(subversion, bazaar, git, and mercurial) to be installed. If you are missing
any of these VCS, those tests should be skipped automatically. You can also
@ -114,6 +123,9 @@ explicitly tell pytest to skip those tests:
$ nox -s test-3.10 -- -k "not svn"
$ nox -s test-3.10 -- -k "not (svn or git)"
.. _python.org: https://www.python.org/downloads/
.. _pyenv: https://github.com/pyenv/pyenv
Running Linters
===============


@ -145,8 +145,8 @@ Creating a new release
#. Push the tag created by ``prepare-release``.
#. Regenerate the ``get-pip.py`` script in the `get-pip repository`_ (as
documented there) and commit the results.
#. Submit a Pull Request to `CPython`_ adding the new version of pip (and upgrading
setuptools) to ``Lib/ensurepip/_bundled``, removing the existing version, and
#. Submit a Pull Request to `CPython`_ adding the new version of pip
to ``Lib/ensurepip/_bundled``, removing the existing version, and
adjusting the versions listed in ``Lib/ensurepip/__init__.py``.


@ -103,7 +103,7 @@ $ pip install --upgrade pip
The current version of pip works on:
- Windows, Linux and MacOS.
- CPython 3.7, 3.8, 3.9, 3.10 and latest PyPy3.
- CPython 3.7, 3.8, 3.9, 3.10, 3.11, 3.12, and latest PyPy3.
pip is tested to work on the latest patch version of the Python interpreter,
for each of the minor versions listed above. Previous patch versions are


@ -56,6 +56,9 @@ package with the following properties:
URL reference. `false` if the requirement was provided as a name and version
specifier.
- `is_yanked`: `true` if the requirement was yanked from the index but was still
selected by pip, in conformance with [PEP 592](https://peps.python.org/pep-0592/#installers).
- `download_info`: Information about the artifact (to be) downloaded for installation,
using the [direct URL data
structure](https://packaging.python.org/en/latest/specifications/direct-url-data-structure/).
@ -106,6 +109,7 @@ will produce an output similar to this (metadata abridged for brevity):
}
},
"is_direct": false,
"is_yanked": false,
"requested": true,
"metadata": {
"name": "pydantic",
@ -133,6 +137,7 @@ will produce an output similar to this (metadata abridged for brevity):
}
},
"is_direct": true,
"is_yanked": false,
"requested": true,
"metadata": {
"name": "packaging",


@ -68,7 +68,7 @@ man pages][netrc-docs].
pip supports loading credentials stored in your keyring using the
{pypi}`keyring` library, which can be enabled by passing `--keyring-provider`
with a value of `auto`, `disabled`, `import`, or `subprocess`. The default
value `auto` respects `--no-input` and not query keyring at all if the option
value `auto` respects `--no-input` and does not query keyring at all if the option
is used; otherwise it tries the `import`, `subprocess`, and `disabled`
providers (in this order) and uses the first one that works.
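
For illustration, a minimal sketch of what the `import` provider amounts to; the service URL and username below are placeholders, not pip's exact lookup keys:

```python
import keyring

# The import provider loads the keyring module in-process and asks the
# configured backend for credentials matching the index URL.
password = keyring.get_password("https://pypi.example.com/simple", "user")
if password is not None:
    print("found credentials in the system keyring")
```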


@ -27,6 +27,13 @@ While this cache attempts to minimize network activity, it does not prevent
network access altogether. If you want a local install solution that
circumvents accessing PyPI, see {ref}`Installing from local packages`.
```{versionchanged} 23.3
A new cache format is now used, stored in a directory called `http-v2` (see
below for this directory's location). Previously this cache was stored in a
directory called `http` in the main cache directory. If you have completely
switched to newer versions of `pip`, you may wish to delete the old directory.
```
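
A hedged sketch of that cleanup, asking pip itself where the cache root lives instead of hard-coding a per-platform path:

```python
import shutil
import subprocess
from pathlib import Path

# `pip cache dir` prints the cache root (the directory that contains
# "http-v2", "wheels", and, on older installs, the legacy "http").
cache_root = Path(
    subprocess.check_output(["pip", "cache", "dir"], text=True).strip()
)
old_http = cache_root / "http"
if old_http.is_dir():
    shutil.rmtree(old_http)  # drop the obsolete pre-23.3 HTTP cache
```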
(wheel-caching)=
### Locally built wheels
@ -124,11 +131,11 @@ The {ref}`pip cache` command can be used to manage pip's cache.
### Removing a single package
`pip cache remove setuptools` removes all wheel files related to setuptools from pip's cache.
`pip cache remove setuptools` removes all wheel files related to setuptools from pip's cache. HTTP cache files are not removed at this time.
### Removing the cache
`pip cache purge` will clear all wheel files from pip's cache.
`pip cache purge` will clear all files from pip's wheel and HTTP caches.
### Listing cached files


@ -19,8 +19,8 @@ and how they are related to pip's various command line options.
## Configuration Files
Configuration files can change the default values for command line option.
They are written using a standard INI style configuration files.
Configuration files can change the default values for command line options.
The files are written using standard INI format.
pip has 3 "levels" of configuration files:
@ -28,11 +28,15 @@ pip has 3 "levels" of configuration files:
- `user`: per-user configuration file.
- `site`: per-environment configuration file; i.e. per-virtualenv.
Additionally, environment variables can be specified which will override any of the above.
### Location
pip's configuration files are located in fairly standard locations. This
location is different on different operating systems, and has some additional
complexity for backwards compatibility reasons.
complexity for backwards compatibility reasons. Note that if user config files
exist in both the legacy and current locations, values in the current file
will override values in the legacy file.
```{tab} Unix
@ -88,9 +92,10 @@ Site
### `PIP_CONFIG_FILE`
Additionally, the environment variable `PIP_CONFIG_FILE` can be used to specify
a configuration file that's loaded first, and whose values are overridden by
the values set in the aforementioned files. Setting this to {any}`os.devnull`
disables the loading of _all_ configuration files.
a configuration file that's loaded last, and whose values override the values
set in the aforementioned files. Setting this to {any}`os.devnull`
disables the loading of _all_ configuration files. Note that if a file exists
at the location that this is set to, the user config file will not be loaded.
(config-precedence)=
@ -99,10 +104,10 @@ disables the loading of _all_ configuration files.
When multiple configuration files are found, pip combines them in the following
order:
- `PIP_CONFIG_FILE`, if given.
- Global
- User
- Site
- `PIP_CONFIG_FILE`, if given.
Each file read overrides any values read from previous files, so if the
global timeout is specified in both the global file and the per-user file,
the per-user value will be the one used.
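
A hedged sketch of that precedence using the standard-library `configparser`; the Unix paths are illustrative, and the real per-platform locations are listed above:

```python
import configparser
import os

files = [
    "/etc/pip.conf",                               # global
    os.path.expanduser("~/.config/pip/pip.conf"),  # user
    "/path/to/venv/pip.conf",                      # site (per-environment)
]
if "PIP_CONFIG_FILE" in os.environ:
    files.append(os.environ["PIP_CONFIG_FILE"])    # read last, so it wins

parser = configparser.ConfigParser()
# read() applies files in order; later files override earlier ones,
# and missing files are silently skipped.
parser.read(files)
```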


@ -28,19 +28,9 @@ It is possible to use the system trust store, instead of the bundled certifi
certificates for verifying HTTPS certificates. This approach will typically
support corporate proxy certificates without additional configuration.
In order to use system trust stores, you need to:
- Use Python 3.10 or newer.
- Install the {pypi}`truststore` package, in the Python environment you're
running pip in.
This is typically done by installing this package using a system package
manager or by using pip in {ref}`Hash-checking mode` for this package and
trusting the network using the `--trusted-host` flag.
In order to use system trust stores, you need to use Python 3.10 or newer.
```{pip-cli}
$ python -m pip install truststore
[...]
$ python -m pip install SomePackage --use-feature=truststore
[...]
Successfully installed SomePackage


@ -8,7 +8,7 @@ and this article is intended to help readers understand what is happening
```{note}
This document is a work in progress. The details included are accurate (at the
time of writing), but there is additional information, in particular around
pip's interface with resolvelib, which have not yet been included.
pip's interface with resolvelib, which has not yet been included.
Contributions to improve this document are welcome.
```
@ -26,7 +26,7 @@ The practical implication of that is that there will always be some situations
where pip cannot determine what to install in a reasonable length of time. We
make every effort to ensure that such situations happen rarely, but eliminating
them altogether isn't even theoretically possible. We'll discuss what options
yopu have if you hit a problem situation like this a little later.
you have if you hit a problem situation like this a little later.
## Python specific issues
@ -136,7 +136,7 @@ operations:
that satisfy them. This is essentially where the finder interacts with the
resolver.
* `is_satisfied_by` - checks if a candidate satisfies a requirement. This is
basically the implementation of what a requirement meams.
basically the implementation of what a requirement means.
* `get_dependencies` - get the dependency metadata for a candidate. This is
the implementation of the process of getting and reading package metadata.
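
To make these operations concrete, here is a toy provider written against resolvelib's provider interface (signatures as of resolvelib 1.x). The two-package in-memory "index" is invented for illustration; pip's real provider is far more involved:

```python
from resolvelib import AbstractProvider, BaseReporter, Resolver

# Requirement: (name, set of acceptable versions). Candidate: (name, version).
INDEX = {
    "a": {1: [("b", {1, 2})], 2: [("b", {2})]},  # a's deps, per version
    "b": {1: [], 2: []},
}

class ToyProvider(AbstractProvider):
    def identify(self, requirement_or_candidate):
        return requirement_or_candidate[0]

    def get_preference(self, identifier, resolutions, candidates,
                       information, backtrack_causes):
        return 0  # no ordering heuristic in this toy

    def find_matches(self, identifier, requirements, incompatibilities):
        reqs = list(requirements[identifier])
        banned = {c[1] for c in incompatibilities[identifier]}
        # Candidates satisfying every requirement, newest version first.
        return [
            (identifier, v)
            for v in sorted(INDEX[identifier], reverse=True)
            if v not in banned and all(v in ok for _, ok in reqs)
        ]

    def is_satisfied_by(self, requirement, candidate):
        return candidate[1] in requirement[1]

    def get_dependencies(self, candidate):
        name, version = candidate
        return INDEX[name][version]

result = Resolver(ToyProvider(), BaseReporter()).resolve([("a", {1, 2})])
print({name: cand[1] for name, cand in result.mapping.items()})  # {'a': 2, 'b': 2}
```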


@ -194,22 +194,17 @@ class PipReqFileOptionsReference(PipOptions):
opt = option()
opt_name = opt._long_opts[0]
if opt._short_opts:
short_opt_name = "{}, ".format(opt._short_opts[0])
short_opt_name = f"{opt._short_opts[0]}, "
else:
short_opt_name = ""
if option in cmdoptions.general_group["options"]:
prefix = ""
else:
prefix = "{}_".format(self.determine_opt_prefix(opt_name))
prefix = f"{self.determine_opt_prefix(opt_name)}_"
self.view_list.append(
"* :ref:`{short}{long}<{prefix}{opt_name}>`".format(
short=short_opt_name,
long=opt_name,
prefix=prefix,
opt_name=opt_name,
),
f"* :ref:`{short_opt_name}{opt_name}<{prefix}{opt_name}>`",
"\n",
)


@ -1,4 +1,4 @@
sphinx ~= 6.0
sphinx ~= 7.0
towncrier
furo
myst_parser

news/11815.doc.rst (new file, 1 line)

@ -0,0 +1 @@
Fix explanation of how PIP_CONFIG_FILE works

news/12389.bugfix.rst (new file, 1 line)

@ -0,0 +1 @@
Update mypy to 1.6.1 and fix/ignore types

news/12390.trivial.rst (new file, 1 line)

@ -0,0 +1 @@
Update ruff versions and config for dev

news/12393.trivial.rst (new file, 1 line)

@ -0,0 +1 @@
Enforce and update code to use f-strings via Ruff rule UP032


@ -0,0 +1 @@
Fix mercurial revision "parse error": use ``--rev={ref}`` instead of ``-r={ref}``


@ -1,2 +0,0 @@
Added seperate instructions for installing ``nox`` in the ``docs/development/getting-started.rst`` doc. and slight update
to the below ``Running pip From Source Tree`` section.


@ -67,7 +67,7 @@ def should_update_common_wheels() -> bool:
# -----------------------------------------------------------------------------
# Development Commands
# -----------------------------------------------------------------------------
@nox.session(python=["3.7", "3.8", "3.9", "3.10", "3.11", "pypy3"])
@nox.session(python=["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "pypy3"])
def test(session: nox.Session) -> None:
# Get the common wheels.
if should_update_common_wheels():
@ -89,6 +89,7 @@ def test(session: nox.Session) -> None:
shutil.rmtree(sdist_dir, ignore_errors=True)
# fmt: off
session.install("setuptools")
session.run(
"python", "setup.py", "sdist", "--formats=zip", "--dist-dir", sdist_dir,
silent=True,
@ -183,6 +184,12 @@ def lint(session: nox.Session) -> None:
# git reset --hard origin/main
@nox.session
def vendoring(session: nox.Session) -> None:
# Ensure that the session Python is running 3.10+
# so that truststore can be installed correctly.
session.run(
"python", "-c", "import sys; sys.exit(1 if sys.version_info < (3, 10) else 0)"
)
session.install("vendoring~=1.2.0")
parser = argparse.ArgumentParser(prog="nox -s vendoring")
@ -219,7 +226,7 @@ def vendoring(session: nox.Session) -> None:
new_version = old_version
for inner_name, inner_version in pinned_requirements(vendor_txt):
if inner_name == name:
# this is a dedicated assignment, to make flake8 happy
# this is a dedicated assignment, to make lint happy
new_version = inner_version
break
else:
@ -315,7 +322,7 @@ def build_release(session: nox.Session) -> None:
)
session.log("# Install dependencies")
session.install("setuptools", "wheel", "twine")
session.install("build", "twine")
with release.isolated_temporary_checkout(session, version) as build_dir:
session.log(
@ -351,7 +358,7 @@ def build_dists(session: nox.Session) -> List[str]:
)
session.log("# Build distributions")
session.run("python", "setup.py", "sdist", "bdist_wheel", silent=True)
session.run("python", "-m", "build", silent=True)
produced_dists = glob.glob("dist/*")
session.log(f"# Verify distributions: {', '.join(produced_dists)}")


@ -71,3 +71,57 @@ setuptools = "pkg_resources"
CacheControl = "https://raw.githubusercontent.com/ionrock/cachecontrol/v0.12.6/LICENSE.txt"
distlib = "https://bitbucket.org/pypa/distlib/raw/master/LICENSE.txt"
webencodings = "https://github.com/SimonSapin/python-webencodings/raw/master/LICENSE"
[tool.ruff]
extend-exclude = [
"_vendor",
"./build",
".scratch",
"data",
]
ignore = [
"B019",
"B020",
"B904", # Ruff enables opinionated warnings by default
"B905", # Ruff enables opinionated warnings by default
]
target-version = "py37"
line-length = 88
select = [
"ASYNC",
"B",
"C4",
"C90",
"E",
"F",
"G",
"I",
"ISC",
"PERF",
"PLE",
"PLR0",
"W",
"RUF100",
"UP032",
]
[tool.ruff.isort]
# We need to explicitly make pip "first party" as it's imported by code in
# the docs and tests directories.
known-first-party = ["pip"]
known-third-party = ["pip._vendor"]
[tool.ruff.mccabe]
max-complexity = 33 # default is 10
[tool.ruff.per-file-ignores]
"noxfile.py" = ["G"]
"src/pip/_internal/*" = ["PERF203"]
"tests/*" = ["B011"]
"tests/unit/test_finder.py" = ["C414"]
[tool.ruff.pylint]
max-args = 15 # default is 5
max-branches = 28 # default is 12
max-returns = 13 # default is 6
max-statements = 134 # default is 50


@ -1,46 +1,13 @@
[isort]
profile = black
skip =
./build,
.nox,
.tox,
.scratch,
_vendor,
data
known_third_party =
pip._vendor
[flake8]
max-line-length = 88
exclude =
./build,
.nox,
.tox,
.scratch,
_vendor,
data
enable-extensions = G
extend-ignore =
G200, G202,
# black adds spaces around ':'
E203,
# using a cache
B019,
# reassigning variables in a loop
B020,
per-file-ignores =
# G: The plugin logging-format treats every .log and .error as logging.
noxfile.py: G
# B011: Do not call assert False since python -O removes these calls
tests/*: B011
[mypy]
mypy_path = $MYPY_CONFIG_FILE_DIR/src
strict = True
no_implicit_reexport = False
allow_subclassing_any = True
allow_untyped_calls = True
warn_return_any = False
ignore_missing_imports = True
disallow_untyped_defs = True
disallow_any_generics = True
warn_unused_ignores = True
no_implicit_optional = True
[mypy-pip._internal.utils._jaraco_text]
ignore_errors = True
@ -51,12 +18,8 @@ ignore_errors = True
# These vendored libraries use runtime magic to populate things and don't sit
# well with static typing out of the box. Eventually we should provide correct
# typing information for their public interface and remove these configs.
[mypy-pip._vendor.colorama]
follow_imports = skip
[mypy-pip._vendor.pkg_resources]
follow_imports = skip
[mypy-pip._vendor.progress.*]
follow_imports = skip
[mypy-pip._vendor.requests.*]
follow_imports = skip


@ -42,6 +42,7 @@ setup(
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
],
@ -76,7 +77,7 @@ setup(
entry_points={
"console_scripts": [
"pip=pip._internal.cli.main:main",
"pip{}=pip._internal.cli.main:main".format(sys.version_info[0]),
f"pip{sys.version_info[0]}=pip._internal.cli.main:main",
"pip{}.{}=pip._internal.cli.main:main".format(*sys.version_info[:2]),
],
},


@ -1,6 +1,6 @@
from typing import List, Optional
__version__ = "23.2.dev0"
__version__ = "24.0.dev0"
def main(args: Optional[List[str]] = None) -> int:


@ -1,6 +1,5 @@
import os
import sys
import warnings
# Remove '' and current working directory from the first entry
# of sys.path, if present to avoid using current directory
@ -20,12 +19,6 @@ if __package__ == "":
sys.path.insert(0, path)
if __name__ == "__main__":
# Work around the error reported in #9540, pending a proper fix.
# Note: It is essential the warning filter is set *before* importing
# pip, as the deprecation happens at import time, not runtime.
warnings.filterwarnings(
"ignore", category=DeprecationWarning, module=".*packaging\\.version"
)
from pip._internal.cli.main import main as _main
sys.exit(_main())


@ -1,6 +1,5 @@
from typing import List, Optional
import pip._internal.utils.inject_securetransport # noqa
from pip._internal.utils import _log
# init_logging() must be called before any call to logging.getLogger()


@ -78,12 +78,10 @@ class Cache:
if can_not_cache:
return []
candidates = []
path = self.get_path_for_link(link)
if os.path.isdir(path):
for candidate in os.listdir(path):
candidates.append((candidate, path))
return candidates
return [(candidate, path) for candidate in os.listdir(path)]
return []
def get_path_for_link(self, link: Link) -> str:
"""Return a directory to store cached items in for link."""
@ -194,7 +192,17 @@ class CacheEntry:
self.origin: Optional[DirectUrl] = None
origin_direct_url_path = Path(self.link.file_path).parent / ORIGIN_JSON_NAME
if origin_direct_url_path.exists():
self.origin = DirectUrl.from_json(origin_direct_url_path.read_text())
try:
self.origin = DirectUrl.from_json(
origin_direct_url_path.read_text(encoding="utf-8")
)
except Exception as e:
logger.warning(
"Ignoring invalid cache entry origin file %s for %s (%s)",
origin_direct_url_path,
link.filename,
e,
)
class WheelCache(Cache):
@ -257,16 +265,26 @@ class WheelCache(Cache):
@staticmethod
def record_download_origin(cache_dir: str, download_info: DirectUrl) -> None:
origin_path = Path(cache_dir) / ORIGIN_JSON_NAME
if origin_path.is_file():
origin = DirectUrl.from_json(origin_path.read_text())
# TODO: use DirectUrl.equivalent when https://github.com/pypa/pip/pull/10564
# is merged.
if origin.url != download_info.url:
if origin_path.exists():
try:
origin = DirectUrl.from_json(origin_path.read_text(encoding="utf-8"))
except Exception as e:
logger.warning(
"Origin URL %s in cache entry %s does not match download URL %s. "
"This is likely a pip bug or a cache corruption issue.",
origin.url,
cache_dir,
download_info.url,
"Could not read origin file %s in cache entry (%s). "
"Will attempt to overwrite it.",
origin_path,
e,
)
else:
# TODO: use DirectUrl.equivalent when
# https://github.com/pypa/pip/pull/10564 is merged.
if origin.url != download_info.url:
logger.warning(
"Origin URL %s in cache entry %s does not match download URL "
"%s. This is likely a pip bug or a cache corruption issue. "
"Will overwrite it with the new value.",
origin.url,
cache_dir,
download_info.url,
)
origin_path.write_text(download_info.to_json(), encoding="utf-8")


@ -71,8 +71,9 @@ def autocomplete() -> None:
for opt in subcommand.parser.option_list_all:
if opt.help != optparse.SUPPRESS_HELP:
for opt_str in opt._long_opts + opt._short_opts:
options.append((opt_str, opt.nargs))
options += [
(opt_str, opt.nargs) for opt_str in opt._long_opts + opt._short_opts
]
# filter out previously specified options from available options
prev_opts = [x.split("=")[0] for x in cwords[1 : cword - 1]]


@ -131,6 +131,17 @@ class Command(CommandContextMixIn):
", ".join(sorted(always_enabled_features)),
)
# Make sure that the --python argument isn't specified after the
# subcommand. We can tell, because if --python was specified,
# we should only reach this point if we're running in the created
# subprocess, which has the _PIP_RUNNING_IN_SUBPROCESS environment
# variable set.
if options.python and "_PIP_RUNNING_IN_SUBPROCESS" not in os.environ:
logger.critical(
"The --python option must be placed before the pip subcommand name"
)
sys.exit(ERROR)
# TODO: Try to get these passing down from the command?
# without resorting to os.environ to hold these.
# This also affects isolated builds and it should.
@ -170,7 +181,7 @@ class Command(CommandContextMixIn):
assert isinstance(status, int)
return status
except DiagnosticPipError as exc:
logger.error("[present-rich] %s", exc)
logger.error("%s", exc, extra={"rich": True})
logger.debug("Exception information:", exc_info=True)
return ERROR


@ -92,10 +92,10 @@ def check_dist_restriction(options: Values, check_target: bool = False) -> None:
)
if check_target:
if dist_restriction_set and not options.target_dir:
if not options.dry_run and dist_restriction_set and not options.target_dir:
raise CommandError(
"Can not use any platform or abi specific options unless "
"installing via '--target'"
"installing via '--target' or using '--dry-run'"
)
@ -582,10 +582,7 @@ def _handle_python_version(
"""
version_info, error_msg = _convert_python_version(value)
if error_msg is not None:
msg = "invalid --python-version value: {!r}: {}".format(
value,
error_msg,
)
msg = f"invalid --python-version value: {value!r}: {error_msg}"
raise_option_error(parser, option=option, msg=msg)
parser.values.python_version = version_info
@ -670,7 +667,10 @@ def prefer_binary() -> Option:
dest="prefer_binary",
action="store_true",
default=False,
help="Prefer older binary packages over newer source packages.",
help=(
"Prefer binary packages over source packages, even if the "
"source packages are newer."
),
)
@ -823,7 +823,7 @@ def _handle_config_settings(
) -> None:
key, sep, val = value.partition("=")
if sep != "=":
parser.error(f"Arguments to {opt_str} must be of the form KEY=VAL") # noqa
parser.error(f"Arguments to {opt_str} must be of the form KEY=VAL")
dest = getattr(parser.values, option.dest)
if dest is None:
dest = {}
@ -918,13 +918,13 @@ def _handle_merge_hash(
algo, digest = value.split(":", 1)
except ValueError:
parser.error(
"Arguments to {} must be a hash name " # noqa
f"Arguments to {opt_str} must be a hash name "
"followed by a value, like --hash=sha256:"
"abcde...".format(opt_str)
"abcde..."
)
if algo not in STRONG_HASHES:
parser.error(
"Allowed hash algorithms for {} are {}.".format( # noqa
"Allowed hash algorithms for {} are {}.".format(
opt_str, ", ".join(STRONG_HASHES)
)
)


@ -229,9 +229,9 @@ class ConfigOptionParser(CustomOptionParser):
val = strtobool(val)
except ValueError:
self.error(
"{} is not a valid value for {} option, " # noqa
f"{val} is not a valid value for {key} option, "
"please specify a boolean value like yes/no, "
"true/false or 1/0 instead.".format(val, key)
"true/false or 1/0 instead."
)
elif option.action == "count":
with suppress(ValueError):
@ -240,10 +240,10 @@ class ConfigOptionParser(CustomOptionParser):
val = int(val)
if not isinstance(val, int) or val < 0:
self.error(
"{} is not a valid value for {} option, " # noqa
f"{val} is not a valid value for {key} option, "
"please instead specify either a non-negative integer "
"or a boolean value like yes/no or false/true "
"which is equivalent to 1/0.".format(val, key)
"which is equivalent to 1/0."
)
elif option.action == "append":
val = val.split()


@ -58,12 +58,9 @@ def _create_truststore_ssl_context() -> Optional["SSLContext"]:
return None
try:
import truststore
except ImportError:
raise CommandError(
"To use the truststore feature, 'truststore' must be installed into "
"pip's current environment."
)
from pip._vendor import truststore
except ImportError as e:
raise CommandError(f"The truststore feature is unavailable: {e}")
return truststore.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
@ -123,7 +120,7 @@ class SessionCommandMixin(CommandContextMixIn):
ssl_context = None
session = PipSession(
cache=os.path.join(cache_dir, "http") if cache_dir else None,
cache=os.path.join(cache_dir, "http-v2") if cache_dir else None,
retries=retries if retries is not None else options.retries,
trusted_hosts=options.trusted_hosts,
index_urls=self._get_index_urls(options),
@ -268,7 +265,7 @@ class RequirementCommand(IndexGroupCommand):
if "legacy-resolver" in options.deprecated_features_enabled:
return "legacy"
return "2020-resolver"
return "resolvelib"
@classmethod
def make_requirement_preparer(
@ -287,9 +284,10 @@ class RequirementCommand(IndexGroupCommand):
"""
temp_build_dir_path = temp_build_dir.path
assert temp_build_dir_path is not None
legacy_resolver = False
resolver_variant = cls.determine_resolver_variant(options)
if resolver_variant == "2020-resolver":
if resolver_variant == "resolvelib":
lazy_wheel = "fast-deps" in options.features_enabled
if lazy_wheel:
logger.warning(
@ -300,6 +298,7 @@ class RequirementCommand(IndexGroupCommand):
"production."
)
else:
legacy_resolver = True
lazy_wheel = False
if "fast-deps" in options.features_enabled:
logger.warning(
@ -320,6 +319,7 @@ class RequirementCommand(IndexGroupCommand):
use_user_site=use_user_site,
lazy_wheel=lazy_wheel,
verbosity=verbosity,
legacy_resolver=legacy_resolver,
)
@classmethod
@ -349,7 +349,7 @@ class RequirementCommand(IndexGroupCommand):
# The long import name and duplicated invocation is needed to convince
# Mypy into correctly typechecking. Otherwise it would complain the
# "Resolver" class being redefined.
if resolver_variant == "2020-resolver":
if resolver_variant == "resolvelib":
import pip._internal.resolution.resolvelib.resolver
return pip._internal.resolution.resolvelib.resolver.Resolver(


@ -3,10 +3,10 @@ import textwrap
from optparse import Values
from typing import Any, List
import pip._internal.utils.filesystem as filesystem
from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.exceptions import CommandError, PipError
from pip._internal.utils import filesystem
from pip._internal.utils.logging import getLogger
logger = getLogger(__name__)
@ -93,24 +93,30 @@ class CacheCommand(Command):
num_http_files = len(self._find_http_files(options))
num_packages = len(self._find_wheels(options, "*"))
http_cache_location = self._cache_dir(options, "http")
http_cache_location = self._cache_dir(options, "http-v2")
old_http_cache_location = self._cache_dir(options, "http")
wheels_cache_location = self._cache_dir(options, "wheels")
http_cache_size = filesystem.format_directory_size(http_cache_location)
http_cache_size = filesystem.format_size(
filesystem.directory_size(http_cache_location)
+ filesystem.directory_size(old_http_cache_location)
)
wheels_cache_size = filesystem.format_directory_size(wheels_cache_location)
message = (
textwrap.dedent(
"""
Package index page cache location: {http_cache_location}
Package index page cache location (pip v23.3+): {http_cache_location}
Package index page cache location (older pips): {old_http_cache_location}
Package index page cache size: {http_cache_size}
Number of HTTP files: {num_http_files}
Locally built wheels location: {wheels_cache_location}
Locally built wheels size: {wheels_cache_size}
Number of locally built wheels: {package_count}
"""
""" # noqa: E501
)
.format(
http_cache_location=http_cache_location,
old_http_cache_location=old_http_cache_location,
http_cache_size=http_cache_size,
num_http_files=num_http_files,
wheels_cache_location=wheels_cache_location,
@ -151,14 +157,8 @@ class CacheCommand(Command):
logger.info("\n".join(sorted(results)))
def format_for_abspath(self, files: List[str]) -> None:
if not files:
return
results = []
for filename in files:
results.append(filename)
logger.info("\n".join(sorted(results)))
if files:
logger.info("\n".join(sorted(files)))
def remove_cache_items(self, options: Values, args: List[Any]) -> None:
if len(args) > 1:
@ -175,7 +175,7 @@ class CacheCommand(Command):
files += self._find_http_files(options)
else:
# Add the pattern to the log message
no_matching_msg += ' for pattern "{}"'.format(args[0])
no_matching_msg += f' for pattern "{args[0]}"'
if not files:
logger.warning(no_matching_msg)
@ -195,8 +195,11 @@ class CacheCommand(Command):
return os.path.join(options.cache_dir, subdir)
def _find_http_files(self, options: Values) -> List[str]:
http_dir = self._cache_dir(options, "http")
return filesystem.find_files(http_dir, "*")
old_http_dir = self._cache_dir(options, "http")
new_http_dir = self._cache_dir(options, "http-v2")
return filesystem.find_files(old_http_dir, "*") + filesystem.find_files(
new_http_dir, "*"
)
def _find_wheels(self, options: Values, pattern: str) -> List[str]:
wheel_dir = self._cache_dir(options, "wheels")


@ -7,6 +7,7 @@ from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.operations.check import (
check_package_set,
create_package_set_from_installed,
warn_legacy_versions_and_specifiers,
)
from pip._internal.utils.misc import write_output
@ -21,6 +22,7 @@ class CheckCommand(Command):
def run(self, options: Values, args: List[str]) -> int:
package_set, parsing_probs = create_package_set_from_installed()
warn_legacy_versions_and_specifiers(package_set)
missing, conflicting = check_package_set(package_set)
for project_name in missing:


@ -22,15 +22,19 @@ COMPLETION_SCRIPTS = {
complete -o default -F _pip_completion {prog}
""",
"zsh": """
function _pip_completion {{
local words cword
read -Ac words
read -cn cword
reply=( $( COMP_WORDS="$words[*]" \\
COMP_CWORD=$(( cword-1 )) \\
PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null ))
#compdef -P pip[0-9.]#
__pip() {{
compadd $( COMP_WORDS="$words[*]" \\
COMP_CWORD=$((CURRENT-1)) \\
PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null )
}}
compctl -K _pip_completion {prog}
if [[ $zsh_eval_context[-1] == loadautofunc ]]; then
# autoload from fpath, call function directly
__pip "$@"
else
# eval/source/. command, register function for later
compdef __pip -P 'pip[0-9.]#'
fi
""",
"fish": """
function __fish_complete_pip


@ -242,17 +242,15 @@ class ConfigurationCommand(Command):
e.filename = editor
raise
except subprocess.CalledProcessError as e:
raise PipError(
"Editor Subprocess exited with exit code {}".format(e.returncode)
)
raise PipError(f"Editor Subprocess exited with exit code {e.returncode}")
def _get_n_args(self, args: List[str], example: str, n: int) -> Any:
"""Helper to make sure the command got the right number of arguments"""
if len(args) != n:
msg = (
"Got unexpected number of arguments, expected {}. "
'(example: "{} config {}")'
).format(n, get_prog(), example)
f"Got unexpected number of arguments, expected {n}. "
f'(example: "{get_prog()} config {example}")'
)
raise PipError(msg)
if n == 1:


@ -46,22 +46,29 @@ def create_vendor_txt_map() -> Dict[str, str]:
return dict(line.split("==", 1) for line in lines)
def get_module_from_module_name(module_name: str) -> ModuleType:
def get_module_from_module_name(module_name: str) -> Optional[ModuleType]:
# Module name can be uppercase in vendor.txt for some reason...
module_name = module_name.lower().replace("-", "_")
# PATCH: setuptools is actually only pkg_resources.
if module_name == "setuptools":
module_name = "pkg_resources"
__import__(f"pip._vendor.{module_name}", globals(), locals(), level=0)
return getattr(pip._vendor, module_name)
try:
__import__(f"pip._vendor.{module_name}", globals(), locals(), level=0)
return getattr(pip._vendor, module_name)
except ImportError:
# We allow 'truststore' to fail to import due
# to being unavailable on Python 3.9 and earlier.
if module_name == "truststore" and sys.version_info < (3, 10):
return None
raise
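The guarded import above tolerates a missing vendored module only when it is legitimately absent on the running interpreter; a self-contained sketch of the same pattern (module handling simplified, names illustrative):

import sys
from types import ModuleType
from typing import Optional

def import_optional(module_name: str) -> Optional[ModuleType]:
    # Swallow ImportError only for modules known to be unavailable on
    # older interpreters; re-raise for everything else.
    try:
        return __import__(module_name)
    except ImportError:
        if module_name == "truststore" and sys.version_info < (3, 10):
            return None
        raise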
def get_vendor_version_from_module(module_name: str) -> Optional[str]:
module = get_module_from_module_name(module_name)
version = getattr(module, "__version__", None)
if not version:
if module and not version:
# Try to find version in debundled module info.
assert module.__file__ is not None
env = get_environment([os.path.dirname(module.__file__)])
@@ -88,7 +95,7 @@ def show_actual_vendor_versions(vendor_txt_versions: Dict[str, str]) -> None:
elif parse_version(actual_version) != parse_version(expected_version):
extra_message = (
" (CONFLICT: vendor.txt suggests version should"
" be {})".format(expected_version)
f" be {expected_version})"
)
logger.info("%s==%s%s", module_name, actual_version, extra_message)
@@ -105,7 +112,7 @@ def show_tags(options: Values) -> None:
tag_limit = 10
target_python = make_target_python(options)
tags = target_python.get_tags()
tags = target_python.get_sorted_tags()
# Display the target options that were explicitly provided.
formatted_target = target_python.format_given()
@@ -113,7 +120,7 @@ def show_tags(options: Values) -> None:
if formatted_target:
suffix = f" (target: {formatted_target})"
msg = "Compatible tags: {}{}".format(len(tags), suffix)
msg = f"Compatible tags: {len(tags)}{suffix}"
logger.info(msg)
if options.verbose < 1 and len(tags) > tag_limit:
@@ -127,17 +134,12 @@ def show_tags(options: Values) -> None:
logger.info(str(tag))
if tags_limited:
msg = (
"...\n[First {tag_limit} tags shown. Pass --verbose to show all.]"
).format(tag_limit=tag_limit)
msg = f"...\n[First {tag_limit} tags shown. Pass --verbose to show all.]"
logger.info(msg)
def ca_bundle_info(config: Configuration) -> str:
levels = set()
for key, _ in config.items():
levels.add(key.split(".")[0])
levels = {key.split(".", 1)[0] for key, _ in config.items()}
if not levels:
return "Not specified"


@@ -137,6 +137,10 @@ class DownloadCommand(RequirementCommand):
assert req.name is not None
preparer.save_linked_requirement(req)
downloaded.append(req.name)
preparer.prepare_linked_requirements_more(requirement_set.requirements.values())
requirement_set.warn_legacy_versions_and_specifiers()
if downloaded:
write_output("Successfully downloaded %s", " ".join(downloaded))


@@ -1,6 +1,6 @@
import sys
from optparse import Values
from typing import List
from typing import AbstractSet, List
from pip._internal.cli import cmdoptions
from pip._internal.cli.base_command import Command
@@ -8,7 +8,18 @@ from pip._internal.operations.freeze import freeze
from pip._internal.operations.freeze import freeze
from pip._internal.utils.compat import stdlib_pkgs
DEV_PKGS = {"pip", "setuptools", "distribute", "wheel"}
def _should_suppress_build_backends() -> bool:
return sys.version_info < (3, 12)
def _dev_pkgs() -> AbstractSet[str]:
pkgs = {"pip"}
if _should_suppress_build_backends():
pkgs |= {"setuptools", "distribute", "wheel"}
return pkgs
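A doctest-style illustration of the new helper: interpreters before 3.12 still preinstall setuptools and wheel into environments, so the historical suppression list is kept there, while on 3.12+ only pip itself is hidden from pip freeze output:

>>> import sys
>>> sys.version_info < (3, 12)  # e.g. when running under Python 3.11
True
>>> sorted(_dev_pkgs())
['distribute', 'pip', 'setuptools', 'wheel']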
class FreezeCommand(Command):
@@ -61,7 +72,7 @@ class FreezeCommand(Command):
action="store_true",
help=(
"Do not skip these packages in the output:"
" {}".format(", ".join(DEV_PKGS))
" {}".format(", ".join(_dev_pkgs()))
),
)
self.cmd_opts.add_option(
@@ -77,7 +88,7 @@ class FreezeCommand(Command):
def run(self, options: Values, args: List[str]) -> int:
skip = set(stdlib_pkgs)
if not options.freeze_all:
skip.update(DEV_PKGS)
skip.update(_dev_pkgs())
if options.excludes:
skip.update(options.excludes)


@@ -128,12 +128,12 @@ class IndexCommand(IndexGroupCommand):
if not versions:
raise DistributionNotFound(
"No matching distribution found for {}".format(query)
f"No matching distribution found for {query}"
)
formatted_versions = [str(ver) for ver in sorted(versions, reverse=True)]
latest = formatted_versions[0]
write_output("{} ({})".format(query, latest))
write_output(f"{query} ({latest})")
write_output("Available versions: {}".format(", ".join(formatted_versions)))
print_dist_installation_info(query, latest)


@@ -387,6 +387,9 @@ class InstallCommand(RequirementCommand):
json.dump(report.to_dict(), f, indent=2, ensure_ascii=False)
if options.dry_run:
# In non dry-run mode, the legacy versions and specifiers check
# will be done as part of conflict detection.
requirement_set.warn_legacy_versions_and_specifiers()
would_install_items = sorted(
(r.metadata["name"], r.metadata["version"])
for r in requirement_set.requirements_to_install
@@ -498,7 +501,7 @@ class InstallCommand(RequirementCommand):
show_traceback,
options.use_user_site,
)
logger.error(message, exc_info=show_traceback) # noqa
logger.error(message, exc_info=show_traceback)
return ERROR
@@ -592,7 +595,7 @@ class InstallCommand(RequirementCommand):
"source of the following dependency conflicts."
)
else:
assert resolver_variant == "2020-resolver"
assert resolver_variant == "resolvelib"
parts.append(
"pip's dependency resolver does not currently take into account "
"all the packages that are installed. This behaviour is the "
@@ -604,12 +607,8 @@ class InstallCommand(RequirementCommand):
version = package_set[project_name][0]
for dependency in missing[project_name]:
message = (
"{name} {version} requires {requirement}, "
f"{project_name} {version} requires {dependency[1]}, "
"which is not installed."
).format(
name=project_name,
version=version,
requirement=dependency[1],
)
parts.append(message)
@@ -625,7 +624,7 @@ class InstallCommand(RequirementCommand):
requirement=req,
dep_name=dep_name,
dep_version=dep_version,
you=("you" if resolver_variant == "2020-resolver" else "you'll"),
you=("you" if resolver_variant == "resolvelib" else "you'll"),
)
parts.append(message)


@@ -103,7 +103,10 @@ class ListCommand(IndexGroupCommand):
dest="list_format",
default="columns",
choices=("columns", "freeze", "json"),
help="Select the output format among: columns (default), freeze, or json",
help=(
"Select the output format among: columns (default), freeze, or json. "
"The 'freeze' format cannot be used with the --outdated option."
),
)
self.cmd_opts.add_option(
@@ -157,7 +160,7 @@ class ListCommand(IndexGroupCommand):
if options.outdated and options.list_format == "freeze":
raise CommandError(
"List format 'freeze' can not be used with the --outdated option."
"List format 'freeze' cannot be used with the --outdated option."
)
cmdoptions.check_list_path_option(options)
@@ -294,7 +297,7 @@ class ListCommand(IndexGroupCommand):
# Create and add a separator.
if len(data) > 0:
pkg_strings.insert(1, " ".join(map(lambda x: "-" * x, sizes)))
pkg_strings.insert(1, " ".join("-" * x for x in sizes))
for val in pkg_strings:
write_output(val)


@@ -153,6 +153,9 @@ class WheelCommand(RequirementCommand):
elif should_build_for_wheel_command(req):
reqs_to_build.append(req)
preparer.prepare_linked_requirements_more(requirement_set.requirements.values())
requirement_set.warn_legacy_versions_and_specifiers()
# build wheels
build_successes, build_failures = build(
reqs_to_build,


@@ -59,8 +59,8 @@ def _disassemble_key(name: str) -> List[str]:
if "." not in name:
error_message = (
"Key does not contain dot separated section and key. "
"Perhaps you wanted to use 'global.{}' instead?"
).format(name)
f"Perhaps you wanted to use 'global.{name}' instead?"
)
raise ConfigurationError(error_message)
return name.split(".", 1)
@@ -210,8 +210,15 @@ class Configuration:
# Ensure directory exists.
ensure_dir(os.path.dirname(fname))
with open(fname, "w") as f:
parser.write(f)
# The configuration file may not be writeable; raise a clear error if so.
try:
with open(fname, "w") as f:
parser.write(f)
except OSError as error:
raise ConfigurationError(
f"An error occurred while writing to the configuration file "
f"{fname}: {error}"
)
#
# Private routines
@@ -320,33 +327,35 @@ class Configuration:
def iter_config_files(self) -> Iterable[Tuple[Kind, List[str]]]:
"""Yields variant and configuration files associated with it.
This should be treated like items of a dictionary.
This should be treated like items of a dictionary. The order
here doesn't affect what gets overridden. That is controlled
by OVERRIDE_ORDER. However, this does control the order they are
displayed to the user. It's probably most ergonomic to display
things in the same order as OVERRIDE_ORDER.
"""
# SMELL: Move the conditions out of this function
# environment variables have the lowest priority
config_file = os.environ.get("PIP_CONFIG_FILE", None)
if config_file is not None:
yield kinds.ENV, [config_file]
else:
yield kinds.ENV, []
env_config_file = os.environ.get("PIP_CONFIG_FILE", None)
config_files = get_configuration_files()
# at the base we have any global configuration
yield kinds.GLOBAL, config_files[kinds.GLOBAL]
# per-user configuration next
# per-user config is not loaded when env_config_file exists
should_load_user_config = not self.isolated and not (
config_file and os.path.exists(config_file)
env_config_file and os.path.exists(env_config_file)
)
if should_load_user_config:
# The legacy config file is overridden by the new config file
yield kinds.USER, config_files[kinds.USER]
# finally virtualenv configuration first trumping others
# virtualenv config
yield kinds.SITE, config_files[kinds.SITE]
if env_config_file is not None:
yield kinds.ENV, [env_config_file]
else:
yield kinds.ENV, []
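A minimal sketch of the user-config suppression rule introduced above (the function name is hypothetical): per-user configuration is skipped whenever PIP_CONFIG_FILE names a file that actually exists, or when pip runs in isolated mode:

import os

def should_load_user_config(isolated: bool) -> bool:
    env_config_file = os.environ.get("PIP_CONFIG_FILE")
    return not isolated and not (
        env_config_file and os.path.exists(env_config_file)
    )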
def get_values_in_config(self, variant: Kind) -> Dict[str, Any]:
"""Get values present in a config file"""
return self._config[variant]


@@ -1,4 +1,5 @@
import abc
from typing import Optional
from pip._internal.index.package_finder import PackageFinder
from pip._internal.metadata.base import BaseDistribution
@@ -19,12 +20,23 @@ class AbstractDistribution(metaclass=abc.ABCMeta):
- we must be able to create a Distribution object exposing the
above metadata.
- if we need to do work in the build tracker, we must be able to generate a unique
string to identify the requirement in the build tracker.
"""
def __init__(self, req: InstallRequirement) -> None:
super().__init__()
self.req = req
@abc.abstractproperty
def build_tracker_id(self) -> Optional[str]:
"""A string that uniquely identifies this requirement to the build tracker.
If None, then this dist has no work to do in the build tracker, and
``.prepare_distribution_metadata()`` will not be called."""
raise NotImplementedError()
@abc.abstractmethod
def get_metadata_distribution(self) -> BaseDistribution:
raise NotImplementedError()


@@ -1,3 +1,5 @@
from typing import Optional
from pip._internal.distributions.base import AbstractDistribution
from pip._internal.index.package_finder import PackageFinder
from pip._internal.metadata import BaseDistribution
@@ -10,6 +12,10 @@ class InstalledDistribution(AbstractDistribution):
been computed.
"""
@property
def build_tracker_id(self) -> Optional[str]:
return None
def get_metadata_distribution(self) -> BaseDistribution:
assert self.req.satisfied_by is not None, "not actually installed"
return self.req.satisfied_by


@@ -1,5 +1,5 @@
import logging
from typing import Iterable, Set, Tuple
from typing import Iterable, Optional, Set, Tuple
from pip._internal.build_env import BuildEnvironment
from pip._internal.distributions.base import AbstractDistribution
@@ -18,6 +18,12 @@ class SourceDistribution(AbstractDistribution):
generated, either using PEP 517 or using the legacy `setup.py egg_info`.
"""
@property
def build_tracker_id(self) -> Optional[str]:
"""Identify this requirement uniquely by its link."""
assert self.req.link
return self.req.link.url_without_fragment
def get_metadata_distribution(self) -> BaseDistribution:
return self.req.get_dist()


@@ -1,3 +1,5 @@
from typing import Optional
from pip._vendor.packaging.utils import canonicalize_name
from pip._internal.distributions.base import AbstractDistribution
@@ -15,6 +17,10 @@ class WheelDistribution(AbstractDistribution):
This does not need any preparation as wheels can be directly unpacked.
"""
@property
def build_tracker_id(self) -> Optional[str]:
return None
def get_metadata_distribution(self) -> BaseDistribution:
"""Loads the metadata from the wheel file into memory and returns a
Distribution that uses it, not relying on the wheel file or


@@ -247,10 +247,7 @@ class NoneMetadataError(PipError):
def __str__(self) -> str:
# Use `dist` in the error message because its stringification
# includes more information, like the version and location.
return "None {} metadata found for distribution: {}".format(
self.metadata_name,
self.dist,
)
return f"None {self.metadata_name} metadata found for distribution: {self.dist}"
class UserInstallationInvalid(InstallationError):
@@ -544,7 +541,7 @@ class HashMissing(HashError):
# so the output can be directly copied into the requirements file.
package = (
self.req.original_link
if self.req.original_link
if self.req.is_direct
# In case someone feeds something downright stupid
# to InstallRequirement's constructor.
else getattr(self.req, "req", None)
@@ -594,7 +591,7 @@ class HashMismatch(HashError):
self.gots = gots
def body(self) -> str:
return " {}:\n{}".format(self._requirement_name(), self._hash_comparison())
return f" {self._requirement_name()}:\n{self._hash_comparison()}"
def _hash_comparison(self) -> str:
"""
@@ -616,11 +613,9 @@ class HashMismatch(HashError):
lines: List[str] = []
for hash_name, expecteds in self.allowed.items():
prefix = hash_then_or(hash_name)
lines.extend(
(" Expected {} {}".format(next(prefix), e)) for e in expecteds
)
lines.extend((f" Expected {next(prefix)} {e}") for e in expecteds)
lines.append(
" Got {}\n".format(self.gots[hash_name].hexdigest())
f" Got {self.gots[hash_name].hexdigest()}\n"
)
return "\n".join(lines)


@@ -198,7 +198,7 @@ class LinkEvaluator:
reason = f"wrong project name (not {self.project_name})"
return (LinkType.different_project, reason)
supported_tags = self._target_python.get_tags()
supported_tags = self._target_python.get_unsorted_tags()
if not wheel.supported(supported_tags):
# Include the wheel's tags in the reason string to
# simplify troubleshooting compatibility issues.
@@ -414,7 +414,7 @@ class CandidateEvaluator:
if specifier is None:
specifier = specifiers.SpecifierSet()
supported_tags = target_python.get_tags()
supported_tags = target_python.get_sorted_tags()
return cls(
project_name=project_name,
@@ -533,8 +533,8 @@ class CandidateEvaluator:
)
except ValueError:
raise UnsupportedWheel(
"{} is not a supported wheel for this platform. It "
"can't be sorted.".format(wheel.filename)
f"{wheel.filename} is not a supported wheel for this platform. It "
"can't be sorted."
)
if self._prefer_binary:
binary_preference = 1
@@ -939,9 +939,7 @@ class PackageFinder:
_format_versions(best_candidate_result.iter_all()),
)
raise DistributionNotFound(
"No matching distribution found for {}".format(req)
)
raise DistributionNotFound(f"No matching distribution found for {req}")
def _should_install_candidate(
candidate: Optional[InstallationCandidate],


@@ -56,8 +56,7 @@ def distutils_scheme(
try:
d.parse_config_files()
except UnicodeDecodeError:
# Typeshed does not include find_config_files() for some reason.
paths = d.find_config_files() # type: ignore
paths = d.find_config_files()
logger.warning(
"Ignore distutils configs in %s due to encoding errors.",
", ".join(os.path.basename(p) for p in paths),
@@ -89,7 +88,7 @@ def distutils_scheme(
# finalize_options(); we only want to override here if the user
# has explicitly requested it hence going back to the config
if "install_lib" in d.get_option_dict("install"):
scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))
scheme.update({"purelib": i.install_lib, "platlib": i.install_lib})
if running_under_virtualenv():
if home:


@@ -9,7 +9,7 @@ from pip._internal.utils.misc import strtobool
from .base import BaseDistribution, BaseEnvironment, FilesystemWheel, MemoryWheel, Wheel
if TYPE_CHECKING:
from typing import Protocol
from typing import Literal, Protocol
else:
Protocol = object
@@ -50,6 +50,7 @@ def _should_use_importlib_metadata() -> bool:
class Backend(Protocol):
NAME: 'Literal["importlib", "pkg_resources"]'
Distribution: Type[BaseDistribution]
Environment: Type[BaseEnvironment]


@@ -64,10 +64,10 @@ def msg_to_json(msg: Message) -> Dict[str, Any]:
key = json_name(field)
if multi:
value: Union[str, List[str]] = [
sanitise_header(v) for v in msg.get_all(field)
sanitise_header(v) for v in msg.get_all(field) # type: ignore
]
else:
value = sanitise_header(msg.get(field))
value = sanitise_header(msg.get(field)) # type: ignore
if key == "keywords":
# Accept both comma-separated and space-separated
# forms, for better compatibility with old data.


@@ -24,7 +24,7 @@ from typing import (
from pip._vendor.packaging.requirements import Requirement
from pip._vendor.packaging.specifiers import InvalidSpecifier, SpecifierSet
from pip._vendor.packaging.utils import NormalizedName
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
from pip._vendor.packaging.version import LegacyVersion, Version
from pip._internal.exceptions import NoneMetadataError
@@ -37,7 +37,6 @@ from pip._internal.models.direct_url import (
from pip._internal.utils.compat import stdlib_pkgs # TODO: Move definition here.
from pip._internal.utils.egg_link import egg_link_path_from_sys_path
from pip._internal.utils.misc import is_local, normalize_path
from pip._internal.utils.packaging import safe_extra
from pip._internal.utils.urls import url_to_path
from ._json import msg_to_json
@@ -460,6 +459,19 @@ class BaseDistribution(Protocol):
For modern .dist-info distributions, this is the collection of
"Provides-Extra:" entries in distribution metadata.
The return value of this function is not particularly useful other than
for display purposes, due to backward compatibility issues and the extra
names being poorly normalized prior to PEP 685. If you want to perform
logic operations on extras, use :func:`is_extra_provided` instead.
"""
raise NotImplementedError()
def is_extra_provided(self, extra: str) -> bool:
"""Check whether an extra is provided by this distribution.
This is needed mostly for compatibility issues with pkg_resources not
following the extra normalization rules defined in PEP 685.
"""
raise NotImplementedError()
@@ -537,10 +549,11 @@ class BaseDistribution(Protocol):
"""Get extras from the egg-info directory."""
known_extras = {""}
for entry in self._iter_requires_txt_entries():
if entry.extra in known_extras:
extra = canonicalize_name(entry.extra)
if extra in known_extras:
continue
known_extras.add(entry.extra)
yield entry.extra
known_extras.add(extra)
yield extra
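For context, the PEP 685 normalization applied to each extra above collapses case and runs of "-", "_", and "."; a doctest-style illustration using the standalone packaging library (the vendored copy behaves the same way):

>>> from packaging.utils import canonicalize_name
>>> canonicalize_name("Test_Extra")
'test-extra'
>>> canonicalize_name("test.extra") == canonicalize_name("TEST-EXTRA")
True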
def _iter_egg_info_dependencies(self) -> Iterable[str]:
"""Get distribution dependencies from the egg-info directory.
@@ -556,10 +569,11 @@ class BaseDistribution(Protocol):
all currently available PEP 517 backends, although not standardized.
"""
for entry in self._iter_requires_txt_entries():
if entry.extra and entry.marker:
marker = f'({entry.marker}) and extra == "{safe_extra(entry.extra)}"'
elif entry.extra:
marker = f'extra == "{safe_extra(entry.extra)}"'
extra = canonicalize_name(entry.extra)
if extra and entry.marker:
marker = f'({entry.marker}) and extra == "{extra}"'
elif extra:
marker = f'extra == "{extra}"'
elif entry.marker:
marker = entry.marker
else:


@@ -1,4 +1,6 @@
from ._dists import Distribution
from ._envs import Environment
__all__ = ["Distribution", "Environment"]
__all__ = ["NAME", "Distribution", "Environment"]
NAME = "importlib"


@@ -27,7 +27,6 @@ from pip._internal.metadata.base import (
Wheel,
)
from pip._internal.utils.misc import normalize_path
from pip._internal.utils.packaging import safe_extra
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.wheel import parse_wheel, read_wheel_metadata_file
@@ -208,12 +207,16 @@ class Distribution(BaseDistribution):
return cast(email.message.Message, self._dist.metadata)
def iter_provided_extras(self) -> Iterable[str]:
return (
safe_extra(extra) for extra in self.metadata.get_all("Provides-Extra", [])
return self.metadata.get_all("Provides-Extra", [])
def is_extra_provided(self, extra: str) -> bool:
return any(
canonicalize_name(provided_extra) == canonicalize_name(extra)
for provided_extra in self.metadata.get_all("Provides-Extra", [])
)
def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
contexts: Sequence[Dict[str, str]] = [{"extra": safe_extra(e)} for e in extras]
contexts: Sequence[Dict[str, str]] = [{"extra": e} for e in extras]
for req_string in self.metadata.get_all("Requires-Dist", []):
req = Requirement(req_string)
if not req.marker:


@@ -151,7 +151,8 @@ def _emit_egg_deprecation(location: Optional[str]) -> None:
deprecated(
reason=f"Loading egg at {location} is deprecated.",
replacement="to use pip for package installation.",
gone_in=None,
gone_in="24.3",
issue=12330,
)
@@ -174,7 +175,7 @@ class Environment(BaseEnvironment):
for location in self._paths:
yield from finder.find(location)
for dist in finder.find_eggs(location):
# _emit_egg_deprecation(dist.location) # TODO: Enable this.
_emit_egg_deprecation(dist.location)
yield dist
# This must go last because that's how pkg_resources tie-breaks.
yield from finder.find_linked(location)


@@ -24,8 +24,12 @@ from .base import (
Wheel,
)
__all__ = ["NAME", "Distribution", "Environment"]
logger = logging.getLogger(__name__)
NAME = "pkg_resources"
class EntryPoint(NamedTuple):
name: str
@@ -212,12 +216,16 @@ class Distribution(BaseDistribution):
def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
if extras: # pkg_resources raises on invalid extras, so we sanitize.
extras = frozenset(extras).intersection(self._dist.extras)
extras = frozenset(pkg_resources.safe_extra(e) for e in extras)
extras = extras.intersection(self._dist.extras)
return self._dist.requires(extras)
def iter_provided_extras(self) -> Iterable[str]:
return self._dist.extras
def is_extra_provided(self, extra: str) -> bool:
return pkg_resources.safe_extra(extra) in self._dist.extras
class Environment(BaseEnvironment):
def __init__(self, ws: pkg_resources.WorkingSet) -> None:


@@ -27,8 +27,4 @@ class InstallationCandidate(KeyBasedCompareMixin):
)
def __str__(self) -> str:
return "{!r} candidate (version {} at {})".format(
self.name,
self.version,
self.link,
)
return f"{self.name!r} candidate (version {self.version} at {self.link})"


@@ -31,9 +31,7 @@ def _get(
value = d[key]
if not isinstance(value, expected_type):
raise DirectUrlValidationError(
"{!r} has unexpected type for {} (expected {})".format(
value, key, expected_type
)
f"{value!r} has unexpected type for {key} (expected {expected_type})"
)
return value


@@ -33,9 +33,7 @@ class FormatControl:
return all(getattr(self, k) == getattr(other, k) for k in self.__slots__)
def __repr__(self) -> str:
return "{}({}, {})".format(
self.__class__.__name__, self.no_binary, self.only_binary
)
return f"{self.__class__.__name__}({self.no_binary}, {self.only_binary})"
@staticmethod
def handle_mutual_excludes(value: str, target: Set[str], other: Set[str]) -> None:


@@ -22,7 +22,10 @@ class InstallationReport:
# is_direct is true if the requirement was a direct URL reference (which
# includes editable requirements), and false if the requirement was
# downloaded from a PEP 503 index or --find-links.
"is_direct": bool(ireq.original_link),
"is_direct": ireq.is_direct,
# is_yanked is true if the requirement was yanked from the index, but
# was still selected by pip to conform to PEP 592.
"is_yanked": ireq.link.is_yanked if ireq.link else False,
# requested is true if the requirement was specified by the user (aka
# top level requirement), and false if it was installed as a dependency of a
# requirement. https://peps.python.org/pep-0376/#requested
@@ -33,7 +36,7 @@ class InstallationReport:
}
if ireq.user_supplied and ireq.extras:
# For top level requirements, the list of requested extras, if any.
res["requested_extras"] = list(sorted(ireq.extras))
res["requested_extras"] = sorted(ireq.extras)
return res
def to_dict(self) -> Dict[str, Any]:


@@ -69,18 +69,6 @@ class LinkHash:
def __post_init__(self) -> None:
assert self.name in _SUPPORTED_HASHES
@classmethod
def parse_pep658_hash(cls, dist_info_metadata: str) -> Optional["LinkHash"]:
"""Parse a PEP 658 data-dist-info-metadata hash."""
if dist_info_metadata == "true":
return None
name, sep, value = dist_info_metadata.partition("=")
if not sep:
return None
if name not in _SUPPORTED_HASHES:
return None
return cls(name=name, value=value)
@classmethod
@functools.lru_cache(maxsize=None)
def find_hash_url_fragment(cls, url: str) -> Optional["LinkHash"]:
@@ -107,6 +95,28 @@ class LinkHash:
return hashes.is_hash_allowed(self.name, hex_digest=self.value)
@dataclass(frozen=True)
class MetadataFile:
"""Information about a core metadata file associated with a distribution."""
hashes: Optional[Dict[str, str]]
def __post_init__(self) -> None:
if self.hashes is not None:
assert all(name in _SUPPORTED_HASHES for name in self.hashes)
def supported_hashes(hashes: Optional[Dict[str, str]]) -> Optional[Dict[str, str]]:
# Remove any unsupported hash types from the mapping. If this leaves no
# supported hashes, return None
if hashes is None:
return None
hashes = {n: v for n, v in hashes.items() if n in _SUPPORTED_HASHES}
if not hashes:
return None
return hashes
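A doctest-style illustration of supported_hashes (digest values are made up; _SUPPORTED_HASHES covers the usual md5/sha1/sha2 names, so the unknown "md4" entry is dropped):

>>> supported_hashes({"sha256": "deadbeef", "md4": "c0ffee"})
{'sha256': 'deadbeef'}
>>> supported_hashes({"md4": "c0ffee"}) is None
True
>>> supported_hashes(None) is None
True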
def _clean_url_path_part(part: str) -> str:
"""
Clean a "part" of a URL path (i.e. after splitting on "@" characters).
@@ -179,7 +189,7 @@ class Link(KeyBasedCompareMixin):
"comes_from",
"requires_python",
"yanked_reason",
"dist_info_metadata",
"metadata_file_data",
"cache_link_parsing",
"egg_fragment",
]
@@ -190,7 +200,7 @@ class Link(KeyBasedCompareMixin):
comes_from: Optional[Union[str, "IndexContent"]] = None,
requires_python: Optional[str] = None,
yanked_reason: Optional[str] = None,
dist_info_metadata: Optional[str] = None,
metadata_file_data: Optional[MetadataFile] = None,
cache_link_parsing: bool = True,
hashes: Optional[Mapping[str, str]] = None,
) -> None:
@@ -208,11 +218,10 @@ class Link(KeyBasedCompareMixin):
a simple repository HTML link. If the file has been yanked but
no reason was provided, this should be the empty string. See
PEP 592 for more information and the specification.
:param dist_info_metadata: the metadata attached to the file, or None if no such
metadata is provided. This is the value of the "data-dist-info-metadata"
attribute, if present, in a simple repository HTML link. This may be parsed
into its own `Link` by `self.metadata_link()`. See PEP 658 for more
information and the specification.
:param metadata_file_data: the metadata attached to the file, or None if
no such metadata is provided. This argument, if not None, indicates
that a separate metadata file exists, and also optionally supplies
hashes for that file.
:param cache_link_parsing: A flag that is used elsewhere to determine
whether resources retrieved from this link should be cached. PyPI
URLs should generally have this set to False, for example.
@@ -220,6 +229,10 @@ class Link(KeyBasedCompareMixin):
determine the validity of a download.
"""
# The comes_from, requires_python, and metadata_file_data arguments are
# only used by classmethods of this class, and are not used in client
# code directly.
# url can be a UNC windows share
if url.startswith("\\\\"):
url = path_to_url(url)
@@ -239,7 +252,7 @@ class Link(KeyBasedCompareMixin):
self.comes_from = comes_from
self.requires_python = requires_python if requires_python else None
self.yanked_reason = yanked_reason
self.dist_info_metadata = dist_info_metadata
self.metadata_file_data = metadata_file_data
super().__init__(key=url, defining_class=Link)
@@ -262,9 +275,25 @@ class Link(KeyBasedCompareMixin):
url = _ensure_quoted_url(urllib.parse.urljoin(page_url, file_url))
pyrequire = file_data.get("requires-python")
yanked_reason = file_data.get("yanked")
dist_info_metadata = file_data.get("dist-info-metadata")
hashes = file_data.get("hashes", {})
# PEP 714: Indexes must use the name core-metadata, but
# clients should support the old name as a fallback for compatibility.
metadata_info = file_data.get("core-metadata")
if metadata_info is None:
metadata_info = file_data.get("dist-info-metadata")
# The metadata info value may be a boolean, or a dict of hashes.
if isinstance(metadata_info, dict):
# The file exists, and hashes have been supplied
metadata_file_data = MetadataFile(supported_hashes(metadata_info))
elif metadata_info:
# The file exists, but there are no hashes
metadata_file_data = MetadataFile(None)
else:
# False or not present: the file does not exist
metadata_file_data = None
# The Link.yanked_reason expects an empty string instead of a boolean.
if yanked_reason and not isinstance(yanked_reason, str):
yanked_reason = ""
@@ -278,7 +307,7 @@ class Link(KeyBasedCompareMixin):
requires_python=pyrequire,
yanked_reason=yanked_reason,
hashes=hashes,
dist_info_metadata=dist_info_metadata,
metadata_file_data=metadata_file_data,
)
@classmethod
@@ -298,14 +327,39 @@ class Link(KeyBasedCompareMixin):
url = _ensure_quoted_url(urllib.parse.urljoin(base_url, href))
pyrequire = anchor_attribs.get("data-requires-python")
yanked_reason = anchor_attribs.get("data-yanked")
dist_info_metadata = anchor_attribs.get("data-dist-info-metadata")
# PEP 714: Indexes must use the name data-core-metadata, but
# clients should support the old name as a fallback for compatibility.
metadata_info = anchor_attribs.get("data-core-metadata")
if metadata_info is None:
metadata_info = anchor_attribs.get("data-dist-info-metadata")
# The metadata info value may be the string "true", or a string of
# the form "hashname=hashval"
if metadata_info == "true":
# The file exists, but there are no hashes
metadata_file_data = MetadataFile(None)
elif metadata_info is None:
# The file does not exist
metadata_file_data = None
else:
# The file exists, and hashes have been supplied
hashname, sep, hashval = metadata_info.partition("=")
if sep == "=":
metadata_file_data = MetadataFile(supported_hashes({hashname: hashval}))
else:
# Error - data is wrong. Treat as no hashes supplied.
logger.debug(
"Index returned invalid data-dist-info-metadata value: %s",
metadata_info,
)
metadata_file_data = MetadataFile(None)
return cls(
url,
comes_from=page_url,
requires_python=pyrequire,
yanked_reason=yanked_reason,
dist_info_metadata=dist_info_metadata,
metadata_file_data=metadata_file_data,
)
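To make the branchy attribute handling above easier to follow, here is a minimal standalone sketch of how a data-core-metadata / data-dist-info-metadata value maps to a result (a plain dict stands in for MetadataFile; the supported-hash filtering and debug logging are omitted):

from typing import Dict, Optional

def parse_metadata_attr(value: Optional[str]) -> Optional[Dict[str, str]]:
    if value is None:
        return None  # attribute absent: no metadata file is advertised
    if value == "true":
        return {}  # the file exists, but the index supplied no hashes
    name, sep, hexdigest = value.partition("=")
    if sep != "=":
        return {}  # malformed value: treated as "file exists, no hashes"
    return {name: hexdigest}  # the file exists with one advertised hash

assert parse_metadata_attr(None) is None
assert parse_metadata_attr("true") == {}
assert parse_metadata_attr("sha256=abc123") == {"sha256": "abc123"}
assert parse_metadata_attr("bogus") == {}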
def __str__(self) -> str:
@@ -314,9 +368,7 @@ class Link(KeyBasedCompareMixin):
else:
rp = ""
if self.comes_from:
return "{} (from {}){}".format(
redact_auth_from_url(self._url), self.comes_from, rp
)
return f"{redact_auth_from_url(self._url)} (from {self.comes_from}){rp}"
else:
return redact_auth_from_url(str(self._url))
@@ -407,17 +459,13 @@ class Link(KeyBasedCompareMixin):
return match.group(1)
def metadata_link(self) -> Optional["Link"]:
"""Implementation of PEP 658 parsing."""
# Note that Link.from_element() parsing the "data-dist-info-metadata" attribute
# from an HTML anchor tag is typically how the Link.dist_info_metadata attribute
# gets set.
if self.dist_info_metadata is None:
"""Return a link to the associated core metadata file (if any)."""
if self.metadata_file_data is None:
return None
metadata_url = f"{self.url_without_fragment}.metadata"
metadata_link_hash = LinkHash.parse_pep658_hash(self.dist_info_metadata)
if metadata_link_hash is None:
if self.metadata_file_data.hashes is None:
return Link(metadata_url)
return Link(metadata_url, hashes=metadata_link_hash.as_dict())
return Link(metadata_url, hashes=self.metadata_file_data.hashes)
def as_hashes(self) -> Hashes:
return Hashes({k: [v] for k, v in self._hashes.items()})


@@ -1,5 +1,5 @@
import sys
from typing import List, Optional, Tuple
from typing import List, Optional, Set, Tuple
from pip._vendor.packaging.tags import Tag
@@ -22,6 +22,7 @@ class TargetPython:
"py_version",
"py_version_info",
"_valid_tags",
"_valid_tags_set",
]
def __init__(
@@ -61,8 +62,9 @@
self.py_version = py_version
self.py_version_info = py_version_info
# This is used to cache the return value of get_tags().
# This is used to cache the return value of get_(un)sorted_tags.
self._valid_tags: Optional[List[Tag]] = None
self._valid_tags_set: Optional[Set[Tag]] = None
def format_given(self) -> str:
"""
@@ -84,7 +86,7 @@
f"{key}={value!r}" for key, value in key_values if value is not None
)
def get_tags(self) -> List[Tag]:
def get_sorted_tags(self) -> List[Tag]:
"""
Return the supported PEP 425 tags to check wheel candidates against.
@@ -108,3 +110,13 @@
self._valid_tags = tags
return self._valid_tags
def get_unsorted_tags(self) -> Set[Tag]:
"""Exactly the same as get_sorted_tags, but returns a set.
This is important for performance.
"""
if self._valid_tags_set is None:
self._valid_tags_set = set(self.get_sorted_tags())
return self._valid_tags_set
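The set variant exists because LinkEvaluator only tests wheel tags for membership, where a set gives O(1) lookups instead of scanning a list. An illustrative micro-benchmark (plain strings stand in for packaging.tags.Tag objects):

import timeit

tags = [f"cp3{i}-cp3{i}-manylinux_2_17_x86_64" for i in range(200)]
tag_set = set(tags)

# Membership misses are the common case when evaluating incompatible wheels.
print(timeit.timeit(lambda: "cp99-none-any" in tags, number=10_000))
print(timeit.timeit(lambda: "cp99-none-any" in tag_set, number=10_000))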


@@ -514,7 +514,9 @@ class MultiDomainBasicAuth(AuthBase):
# Consume content and release the original connection to allow our new
# request to reuse the same one.
resp.content
# The result of the assignment isn't used, it's just needed to consume
# the content.
_ = resp.content
resp.raw.release_conn()
# Add our new username and password to the request


@@ -3,10 +3,11 @@
import os
from contextlib import contextmanager
from typing import Generator, Optional
from datetime import datetime
from typing import BinaryIO, Generator, Optional, Union
from pip._vendor.cachecontrol.cache import BaseCache
from pip._vendor.cachecontrol.caches import FileCache
from pip._vendor.cachecontrol.cache import SeparateBodyBaseCache
from pip._vendor.cachecontrol.caches import SeparateBodyFileCache
from pip._vendor.requests.models import Response
from pip._internal.utils.filesystem import adjacent_tmp_file, replace
@@ -28,10 +29,22 @@ def suppressed_cache_errors() -> Generator[None, None, None]:
pass
class SafeFileCache(BaseCache):
class SafeFileCache(SeparateBodyBaseCache):
"""
A file based cache which is safe to use even when the target directory may
not be accessible or writable.
There is a race condition when two processes try to write and/or read the
same entry at the same time, since each entry consists of two separate
files (https://github.com/psf/cachecontrol/issues/324). We therefore have
additional logic that makes sure that both files are present before
returning an entry; this fixes the read side of the race condition.
For the write side, we assume that the server will only ever return the
same data for the same URL, which ought to be the case for files pip is
downloading. PyPI does not have a mechanism to swap out a wheel for
another wheel, for example. If this assumption is not true, the
CacheControl issue will need to be fixed.
"""
def __init__(self, directory: str) -> None:
@@ -43,27 +56,51 @@ class SafeFileCache(BaseCache):
# From cachecontrol.caches.file_cache.FileCache._fn, brought into our
# class for backwards-compatibility and to avoid using a non-public
# method.
hashed = FileCache.encode(name)
hashed = SeparateBodyFileCache.encode(name)
parts = list(hashed[:5]) + [hashed]
return os.path.join(self.directory, *parts)
def get(self, key: str) -> Optional[bytes]:
path = self._get_cache_path(key)
# The cache entry is only valid if both metadata and body exist.
metadata_path = self._get_cache_path(key)
body_path = metadata_path + ".body"
if not (os.path.exists(metadata_path) and os.path.exists(body_path)):
return None
with suppressed_cache_errors():
with open(path, "rb") as f:
with open(metadata_path, "rb") as f:
return f.read()
def set(self, key: str, value: bytes, expires: Optional[int] = None) -> None:
path = self._get_cache_path(key)
def _write(self, path: str, data: bytes) -> None:
with suppressed_cache_errors():
ensure_dir(os.path.dirname(path))
with adjacent_tmp_file(path) as f:
f.write(value)
f.write(data)
replace(f.name, path)
def set(
self, key: str, value: bytes, expires: Union[int, datetime, None] = None
) -> None:
path = self._get_cache_path(key)
self._write(path, value)
def delete(self, key: str) -> None:
path = self._get_cache_path(key)
with suppressed_cache_errors():
os.remove(path)
with suppressed_cache_errors():
os.remove(path + ".body")
def get_body(self, key: str) -> Optional[BinaryIO]:
# The cache entry is only valid if both metadata and body exist.
metadata_path = self._get_cache_path(key)
body_path = metadata_path + ".body"
if not (os.path.exists(metadata_path) and os.path.exists(body_path)):
return None
with suppressed_cache_errors():
return open(body_path, "rb")
def set_body(self, key: str, body: bytes) -> None:
path = self._get_cache_path(key) + ".body"
self._write(path, body)
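The read-side fix is easiest to see with the two files written out of order. An illustrative snippet (paths are temporary; this is not pip's code) showing why get()/get_body() must check for both files:

import os
import tempfile

cache_dir = tempfile.mkdtemp()
metadata_path = os.path.join(cache_dir, "entry")
body_path = metadata_path + ".body"

with open(metadata_path, "wb") as f:  # the writer has stored the metadata...
    f.write(b"headers")

# ...but not yet the body, so a concurrent reader must treat the entry
# as absent instead of returning a half-written response.
assert not (os.path.exists(metadata_path) and os.path.exists(body_path))

with open(body_path, "wb") as f:
    f.write(b"payload")
# Only now would SafeFileCache.get() / get_body() return data.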


@@ -42,7 +42,7 @@ def _prepare_download(
logged_url = redact_auth_from_url(url)
if total_length:
logged_url = "{} ({})".format(logged_url, format_size(total_length))
logged_url = f"{logged_url} ({format_size(total_length)})"
if is_from_cache(resp):
logger.info("Using cached %s", logged_url)


@@ -419,15 +419,17 @@ class PipSession(requests.Session):
msg += f" (from {source})"
logger.info(msg)
host_port = parse_netloc(host)
if host_port not in self.pip_trusted_origins:
self.pip_trusted_origins.append(host_port)
parsed_host, parsed_port = parse_netloc(host)
if parsed_host is None:
raise ValueError(f"Trusted host URL must include a host part: {host!r}")
if (parsed_host, parsed_port) not in self.pip_trusted_origins:
self.pip_trusted_origins.append((parsed_host, parsed_port))
self.mount(
build_url_from_netloc(host, scheme="http") + "/", self._trusted_host_adapter
)
self.mount(build_url_from_netloc(host) + "/", self._trusted_host_adapter)
if not host_port[1]:
if not parsed_port:
self.mount(
build_url_from_netloc(host, scheme="http") + ":",
self._trusted_host_adapter,


@@ -13,6 +13,8 @@ from pip._internal.network.utils import raise_for_status
if TYPE_CHECKING:
from xmlrpc.client import _HostType, _Marshallable
from _typeshed import SizedBuffer
logger = logging.getLogger(__name__)
@@ -33,7 +35,7 @@ class PipXmlrpcTransport(xmlrpc.client.Transport):
self,
host: "_HostType",
handler: str,
request_body: bytes,
request_body: "SizedBuffer",
verbose: bool = False,
) -> Tuple["_Marshallable", ...]:
assert isinstance(host, str)


@@ -51,10 +51,22 @@ def get_build_tracker() -> Generator["BuildTracker", None, None]:
yield tracker
class TrackerId(str):
"""Uniquely identifying string provided to the build tracker."""
class BuildTracker:
"""Ensure that an sdist cannot request itself as a setup requirement.
When an sdist is prepared, it identifies its setup requirements in the
context of ``BuildTracker.track()``. If a requirement shows up recursively, this
raises an exception.
This stops fork bombs embedded in malicious packages."""
def __init__(self, root: str) -> None:
self._root = root
self._entries: Set[InstallRequirement] = set()
self._entries: Dict[TrackerId, InstallRequirement] = {}
logger.debug("Created build tracker: %s", self._root)
def __enter__(self) -> "BuildTracker":
@@ -69,16 +81,15 @@ class BuildTracker:
) -> None:
self.cleanup()
def _entry_path(self, link: Link) -> str:
hashed = hashlib.sha224(link.url_without_fragment.encode()).hexdigest()
def _entry_path(self, key: TrackerId) -> str:
hashed = hashlib.sha224(key.encode()).hexdigest()
return os.path.join(self._root, hashed)
def add(self, req: InstallRequirement) -> None:
def add(self, req: InstallRequirement, key: TrackerId) -> None:
"""Add an InstallRequirement to build tracking."""
assert req.link
# Get the file to write information about this requirement.
entry_path = self._entry_path(req.link)
entry_path = self._entry_path(key)
# Try reading from the file. If it exists and can be read from, a build
# is already in progress, so a LookupError is raised.
@@ -92,33 +103,37 @@ class BuildTracker:
raise LookupError(message)
# If we're here, req should really not be building already.
assert req not in self._entries
assert key not in self._entries
# Start tracking this requirement.
with open(entry_path, "w", encoding="utf-8") as fp:
fp.write(str(req))
self._entries.add(req)
self._entries[key] = req
logger.debug("Added %s to build tracker %r", req, self._root)
def remove(self, req: InstallRequirement) -> None:
def remove(self, req: InstallRequirement, key: TrackerId) -> None:
"""Remove an InstallRequirement from build tracking."""
assert req.link
# Delete the created file and the corresponding entries.
os.unlink(self._entry_path(req.link))
self._entries.remove(req)
# Delete the created file and the corresponding entry.
os.unlink(self._entry_path(key))
del self._entries[key]
logger.debug("Removed %s from build tracker %r", req, self._root)
def cleanup(self) -> None:
for req in set(self._entries):
self.remove(req)
for key, req in list(self._entries.items()):
self.remove(req, key)
logger.debug("Removed build tracker: %r", self._root)
@contextlib.contextmanager
def track(self, req: InstallRequirement) -> Generator[None, None, None]:
self.add(req)
def track(self, req: InstallRequirement, key: str) -> Generator[None, None, None]:
"""Ensure that `key` cannot install itself as a setup requirement.
:raises LookupError: If `key` was already provided in a parent invocation of
the context introduced by this method."""
tracker_id = TrackerId(key)
self.add(req, tracker_id)
yield
self.remove(req)
self.remove(req, tracker_id)
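Stripped of the filesystem bookkeeping, the tracker is a keyed re-entrancy guard. A self-contained sketch of the same idea (the real tracker also persists entries to disk and relies on cleanup() rather than a finally block):

import contextlib
from typing import Iterator, Set

class TinyTracker:
    def __init__(self) -> None:
        self._entries: Set[str] = set()

    @contextlib.contextmanager
    def track(self, key: str) -> Iterator[None]:
        # A key showing up recursively means something is trying to
        # build itself as its own setup requirement.
        if key in self._entries:
            raise LookupError(f"{key} is already being built")
        self._entries.add(key)
        try:
            yield
        finally:
            self._entries.discard(key)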


@@ -5,12 +5,15 @@ import logging
from typing import Callable, Dict, List, NamedTuple, Optional, Set, Tuple
from pip._vendor.packaging.requirements import Requirement
from pip._vendor.packaging.specifiers import LegacySpecifier
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
from pip._vendor.packaging.version import LegacyVersion
from pip._internal.distributions import make_distribution_for_install_requirement
from pip._internal.metadata import get_default_environment
from pip._internal.metadata.base import DistributionVersion
from pip._internal.req.req_install import InstallRequirement
from pip._internal.utils.deprecation import deprecated
logger = logging.getLogger(__name__)
@@ -57,6 +60,8 @@ def check_package_set(
package name and returns a boolean.
"""
warn_legacy_versions_and_specifiers(package_set)
missing = {}
conflicting = {}
@@ -147,3 +152,36 @@ def _create_whitelist(
break
return packages_affected
def warn_legacy_versions_and_specifiers(package_set: PackageSet) -> None:
for project_name, package_details in package_set.items():
if isinstance(package_details.version, LegacyVersion):
deprecated(
reason=(
f"{project_name} {package_details.version} "
f"has a non-standard version number."
),
replacement=(
f"to upgrade to a newer version of {project_name} "
f"or contact the author to suggest that they "
f"release a version with a conforming version number"
),
issue=12063,
gone_in="24.0",
)
for dep in package_details.dependencies:
if any(isinstance(spec, LegacySpecifier) for spec in dep.specifier):
deprecated(
reason=(
f"{project_name} {package_details.version} "
f"has a non-standard dependency specifier {dep}."
),
replacement=(
f"to upgrade to a newer version of {project_name} "
f"or contact the author to suggest that they "
f"release a version with a conforming dependency specifiers"
),
issue=12063,
gone_in="24.0",
)
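For reference, what trips these warnings: the vendored packaging 21.x falls back to LegacyVersion for version strings that are not valid PEP 440 (packaging 22+ removed that fallback and raises InvalidVersion instead). Illustrative:

from pip._vendor.packaging.version import parse

print(type(parse("1.0")).__name__)    # Version: PEP 440 compliant
print(type(parse("2004d")).__name__)  # LegacyVersion: triggers the deprecation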


@@ -164,16 +164,14 @@ def message_about_scripts_not_on_PATH(scripts: Sequence[str]) -> Optional[str]:
for parent_dir, dir_scripts in warn_for.items():
sorted_scripts: List[str] = sorted(dir_scripts)
if len(sorted_scripts) == 1:
start_text = "script {} is".format(sorted_scripts[0])
start_text = f"script {sorted_scripts[0]} is"
else:
start_text = "scripts {} are".format(
", ".join(sorted_scripts[:-1]) + " and " + sorted_scripts[-1]
)
msg_lines.append(
"The {} installed in '{}' which is not on PATH.".format(
start_text, parent_dir
)
f"The {start_text} installed in '{parent_dir}' which is not on PATH."
)
last_line_fmt = (
@@ -267,9 +265,9 @@ def get_csv_rows_for_installed(
path = _fs_to_record_path(f, lib_dir)
digest, length = rehash(f)
installed_rows.append((path, digest, length))
for installed_record_path in installed.values():
installed_rows.append((installed_record_path, "", ""))
return installed_rows
return installed_rows + [
(installed_record_path, "", "") for installed_record_path in installed.values()
]
def get_console_script_specs(console: Dict[str, str]) -> List[str]:
@@ -321,9 +319,7 @@ def get_console_script_specs(console: Dict[str, str]) -> List[str]:
scripts_to_generate.append("pip = " + pip_script)
if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
scripts_to_generate.append(
"pip{} = {}".format(sys.version_info[0], pip_script)
)
scripts_to_generate.append(f"pip{sys.version_info[0]} = {pip_script}")
scripts_to_generate.append(f"pip{get_major_minor_version()} = {pip_script}")
# Delete any other versioned pip entry points
@@ -336,9 +332,7 @@ def get_console_script_specs(console: Dict[str, str]) -> List[str]:
scripts_to_generate.append("easy_install = " + easy_install_script)
scripts_to_generate.append(
"easy_install-{} = {}".format(
get_major_minor_version(), easy_install_script
)
f"easy_install-{get_major_minor_version()} = {easy_install_script}"
)
# Delete any other versioned easy_install entry points
easy_install_ep = [
@@ -408,10 +402,10 @@ class ScriptFile:
class MissingCallableSuffix(InstallationError):
def __init__(self, entry_point: str) -> None:
super().__init__(
"Invalid script entry point: {} - A callable "
f"Invalid script entry point: {entry_point} - A callable "
"suffix is required. Cf https://packaging.python.org/"
"specifications/entry-points/#use-for-scripts for more "
"information.".format(entry_point)
"information."
)
@@ -712,7 +706,7 @@ def req_error_context(req_description: str) -> Generator[None, None, None]:
try:
yield
except InstallationError as e:
message = "For req: {}. {}".format(req_description, e.args[0])
message = f"For req: {req_description}. {e.args[0]}"
raise InstallationError(message) from e


@@ -4,10 +4,10 @@
# The following comment should be removed at some point in the future.
# mypy: strict-optional=False
import logging
import mimetypes
import os
import shutil
from pathlib import Path
from typing import Dict, Iterable, List, Optional
from pip._vendor.packaging.utils import canonicalize_name
@@ -21,7 +21,6 @@ from pip._internal.exceptions import (
InstallationError,
MetadataInconsistent,
NetworkConnectionError,
PreviousBuildDirError,
VcsHashUnsupported,
)
from pip._internal.index.package_finder import PackageFinder
@@ -37,6 +36,7 @@ from pip._internal.network.lazy_wheel import (
from pip._internal.network.session import PipSession
from pip._internal.operations.build.build_tracker import BuildTracker
from pip._internal.req.req_install import InstallRequirement
from pip._internal.utils._log import getLogger
from pip._internal.utils.direct_url_helpers import (
direct_url_for_editable,
direct_url_from_link,
@@ -47,13 +47,13 @@ from pip._internal.utils.misc import (
display_path,
hash_file,
hide_url,
is_installable_dir,
redact_auth_from_requirement,
)
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.unpacking import unpack_file
from pip._internal.vcs import vcs
logger = logging.getLogger(__name__)
logger = getLogger(__name__)
def _get_prepared_distribution(
@@ -65,10 +65,12 @@
) -> BaseDistribution:
"""Prepare a distribution for installation."""
abstract_dist = make_distribution_for_install_requirement(req)
with build_tracker.track(req):
abstract_dist.prepare_distribution_metadata(
finder, build_isolation, check_build_deps
)
tracker_id = abstract_dist.build_tracker_id
if tracker_id is not None:
with build_tracker.track(req, tracker_id):
abstract_dist.prepare_distribution_metadata(
finder, build_isolation, check_build_deps
)
return abstract_dist.get_metadata_distribution()
@@ -226,6 +228,7 @@ class RequirementPreparer:
use_user_site: bool,
lazy_wheel: bool,
verbosity: int,
legacy_resolver: bool,
) -> None:
super().__init__()
@@ -259,6 +262,9 @@ class RequirementPreparer:
# How verbose should underlying tooling be?
self.verbosity = verbosity
# Are we using the legacy resolver?
self.legacy_resolver = legacy_resolver
# Memoized downloaded files, as mapping of url: path.
self._downloaded: Dict[str, str] = {}
@@ -272,7 +278,7 @@ class RequirementPreparer:
information = str(display_path(req.link.file_path))
else:
message = "Collecting %s"
information = str(req.req or req)
information = redact_auth_from_requirement(req.req) if req.req else str(req)
# If we used req.req, inject requirement source if available (this
# would already be included if we used req directly)
@@ -313,21 +319,7 @@ class RequirementPreparer:
autodelete=True,
parallel_builds=parallel_builds,
)
# If a checkout exists, it's unwise to keep going. version
# inconsistencies are logged later, but do not fail the
# installation.
# FIXME: this won't upgrade when there's an existing
# package unpacked in `req.source_dir`
# TODO: this check is now probably dead code
if is_installable_dir(req.source_dir):
raise PreviousBuildDirError(
"pip can't proceed with requirements '{}' due to a"
"pre-existing build directory ({}). This is likely "
"due to a previous installation that failed . pip is "
"being responsible and not assuming it can delete this. "
"Please delete it and try again.".format(req, req.source_dir)
)
req.ensure_pristine_source_checkout()
def _get_linked_req_hashes(self, req: InstallRequirement) -> Hashes:
# By the time this is called, the requirement's link should have
@@ -352,7 +344,7 @@ class RequirementPreparer:
# a surprising hash mismatch in the future.
# file:/// URLs aren't pinnable, so don't complain about them
# not being pinned.
if req.original_link is None and not req.is_pinned:
if not req.is_direct and not req.is_pinned:
raise HashUnpinned()
# If known-good hashes are missing for this requirement,
@@ -365,6 +357,11 @@
self,
req: InstallRequirement,
) -> Optional[BaseDistribution]:
if self.legacy_resolver:
logger.debug(
"Metadata-only fetching is not used in the legacy resolver",
)
return None
if self.require_hashes:
logger.debug(
"Metadata-only fetching is not used as hash checking is required",
@@ -385,7 +382,7 @@
if metadata_link is None:
return None
assert req.req is not None
logger.info(
logger.verbose(
"Obtaining dependency information for %s from %s",
req.req,
metadata_link,
@@ -410,7 +407,7 @@
# NB: raw_name will fall back to the name from the install requirement if
# the Name: field is not present, but it's noted in the raw_name docstring
# that that should NEVER happen anyway.
if metadata_dist.raw_name != req.req.name:
if canonicalize_name(metadata_dist.raw_name) != canonicalize_name(req.req.name):
raise MetadataInconsistent(
req, "Name", req.req.name, metadata_dist.raw_name
)
@@ -470,7 +467,19 @@ class RequirementPreparer:
for link, (filepath, _) in batch_download:
logger.debug("Downloading link %s to %s", link, filepath)
req = links_to_fully_download[link]
# Record the downloaded file path so wheel reqs can extract a Distribution
# in .get_dist().
req.local_file_path = filepath
# Record that the file is downloaded so we don't do it again in
# _prepare_linked_requirement().
self._downloaded[req.link.url] = filepath
# If this is an sdist, we need to unpack it after downloading, but the
# .source_dir won't be set up until we are in _prepare_linked_requirement().
# Add the downloaded archive to the install requirement to unpack after
# preparing the source dir.
if not req.is_wheel:
req.needs_unpacked_archive(Path(filepath))
# This step is necessary to ensure all lazy wheels are processed
# successfully by the 'download', 'wheel', and 'install' commands.
@@ -594,8 +603,8 @@ class RequirementPreparer:
)
except NetworkConnectionError as exc:
raise InstallationError(
"Could not install requirement {} because of HTTP "
"error {} for URL {}".format(req, exc, link)
f"Could not install requirement {req} because of HTTP "
f"error {exc} for URL {link}"
)
else:
file_path = self._downloaded[link.url]
@@ -675,9 +684,9 @@ class RequirementPreparer:
with indent_log():
if self.require_hashes:
raise InstallationError(
"The editable requirement {} cannot be installed when "
f"The editable requirement {req} cannot be installed when "
"requiring hashes, because there is no single file to "
"hash.".format(req)
"hash."
)
req.ensure_has_source_dir(self.src_dir)
req.update_editable()
@@ -705,7 +714,7 @@ class RequirementPreparer:
assert req.satisfied_by, "req should have been satisfied but isn't"
assert skip_reason is not None, (
"did not get skip reason skipped but req.satisfied_by "
"is set to {}".format(req.satisfied_by)
f"is set to {req.satisfied_by}"
)
logger.info(
"Requirement %s: %s (%s)", skip_reason, req, req.satisfied_by.version


@@ -8,10 +8,11 @@ These are meant to be used elsewhere within pip to create instances of
InstallRequirement.
"""
import copy
import logging
import os
import re
from typing import Dict, List, Optional, Set, Tuple, Union
from typing import Collection, Dict, List, Optional, Set, Tuple, Union
from pip._vendor.packaging.markers import Marker
from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
@@ -57,6 +58,31 @@ def convert_extras(extras: Optional[str]) -> Set[str]:
return get_requirement("placeholder" + extras.lower()).extras
def _set_requirement_extras(req: Requirement, new_extras: Set[str]) -> Requirement:
"""
Returns a new requirement based on the given one, with the supplied extras. If the
given requirement already has extras those are replaced (or dropped if no new extras
are given).
"""
match: Optional[re.Match[str]] = re.fullmatch(
# see https://peps.python.org/pep-0508/#complete-grammar
r"([\w\t .-]+)(\[[^\]]*\])?(.*)",
str(req),
flags=re.ASCII,
)
# ireq.req is a valid requirement so the regex should always match
assert (
match is not None
), f"regex match on requirement {req} failed, this should never happen"
pre: Optional[str] = match.group(1)
post: Optional[str] = match.group(3)
assert (
pre is not None and post is not None
), f"regex group selection for requirement {req} failed, this should never happen"
extras: str = "[%s]" % ",".join(sorted(new_extras)) if new_extras else ""
return Requirement(f"{pre}{extras}{post}")
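A doctest-style illustration of the expected behavior (names and versions are made up): the supplied extras replace any existing ones, and an empty set drops the extras entirely:

>>> from pip._vendor.packaging.requirements import Requirement
>>> str(_set_requirement_extras(Requirement("name[foo]>=1.0"), {"bar", "baz"}))
'name[bar,baz]>=1.0'
>>> str(_set_requirement_extras(Requirement("name[foo]>=1.0"), set()))
'name>=1.0'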
def parse_editable(editable_req: str) -> Tuple[Optional[str], str, Set[str]]:
"""Parses an editable requirement into:
- a requirement name
@@ -436,7 +462,7 @@ def install_req_from_req_string(
raise InstallationError(
"Packages installed from PyPI cannot depend on packages "
"which are not also hosted on PyPI.\n"
"{} depends on {} ".format(comes_from.name, req)
f"{comes_from.name} depends on {req} "
)
return InstallRequirement(
@@ -504,3 +530,47 @@ def install_req_from_link_and_ireq(
config_settings=ireq.config_settings,
user_supplied=ireq.user_supplied,
)
def install_req_drop_extras(ireq: InstallRequirement) -> InstallRequirement:
"""
Creates a new InstallRequirement using the given template but without
any extras. Sets the original requirement as the new one's parent
(comes_from).
"""
return InstallRequirement(
req=(
_set_requirement_extras(ireq.req, set()) if ireq.req is not None else None
),
comes_from=ireq,
editable=ireq.editable,
link=ireq.link,
markers=ireq.markers,
use_pep517=ireq.use_pep517,
isolated=ireq.isolated,
global_options=ireq.global_options,
hash_options=ireq.hash_options,
constraint=ireq.constraint,
extras=[],
config_settings=ireq.config_settings,
user_supplied=ireq.user_supplied,
permit_editable_wheels=ireq.permit_editable_wheels,
)
def install_req_extend_extras(
ireq: InstallRequirement,
extras: Collection[str],
) -> InstallRequirement:
"""
Returns a copy of an installation requirement with some additional extras.
Makes a shallow copy of the ireq object.
"""
result = copy.copy(ireq)
result.extras = {*ireq.extras, *extras}
result.req = (
_set_requirement_extras(ireq.req, result.extras)
if ireq.req is not None
else None
)
return result


@@ -1,6 +1,3 @@
# The following comment should be removed at some point in the future.
# mypy: strict-optional=False
import functools
import logging
import os
@@ -9,6 +6,7 @@ import sys
import uuid
import zipfile
from optparse import Values
from pathlib import Path
from typing import Any, Collection, Dict, Iterable, List, Optional, Sequence, Union
from pip._vendor.packaging.markers import Marker
@@ -20,7 +18,7 @@ from pip._vendor.packaging.version import parse as parse_version
from pip._vendor.pyproject_hooks import BuildBackendHookCaller
from pip._internal.build_env import BuildEnvironment, NoOpBuildEnvironment
from pip._internal.exceptions import InstallationError
from pip._internal.exceptions import InstallationError, PreviousBuildDirError
from pip._internal.locations import get_scheme
from pip._internal.metadata import (
BaseDistribution,
@ -50,11 +48,14 @@ from pip._internal.utils.misc import (
backup_dir,
display_path,
hide_url,
is_installable_dir,
redact_auth_from_requirement,
redact_auth_from_url,
)
from pip._internal.utils.packaging import safe_extra
from pip._internal.utils.subprocess import runner_with_spinner_message
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
from pip._internal.utils.unpacking import unpack_file
from pip._internal.utils.virtualenv import running_under_virtualenv
from pip._internal.vcs import vcs
@ -104,6 +105,8 @@ class InstallRequirement:
if link.is_file:
self.source_dir = os.path.normpath(os.path.abspath(link.file_path))
# original_link is the direct URL that was provided by the user for the
# requirement, either directly or via a constraints file.
if link is None and req and req.url:
# PEP 508 URL requirement
link = Link(req.url)
@ -126,7 +129,7 @@ class InstallRequirement:
if extras:
self.extras = extras
elif req:
self.extras = {safe_extra(extra) for extra in req.extras}
self.extras = req.extras
else:
self.extras = set()
if markers is None and req:
@ -181,11 +184,14 @@ class InstallRequirement:
# This requirement needs more preparation before it can be built
self.needs_more_preparation = False
# This requirement needs to be unpacked before it can be installed.
self._archive_source: Optional[Path] = None
def __str__(self) -> str:
if self.req:
s = str(self.req)
s = redact_auth_from_requirement(self.req)
if self.link:
s += " from {}".format(redact_auth_from_url(self.link.url))
s += f" from {redact_auth_from_url(self.link.url)}"
elif self.link:
s = redact_auth_from_url(self.link.url)
else:
@ -215,7 +221,7 @@ class InstallRequirement:
attributes = vars(self)
names = sorted(attributes)
state = ("{}={!r}".format(attr, attributes[attr]) for attr in sorted(names))
state = (f"{attr}={attributes[attr]!r}" for attr in sorted(names))
return "<{name} object: {{{state}}}>".format(
name=self.__class__.__name__,
state=", ".join(state),
@ -242,15 +248,22 @@ class InstallRequirement:
@property
def specifier(self) -> SpecifierSet:
assert self.req is not None
return self.req.specifier
@property
def is_direct(self) -> bool:
"""Whether this requirement was specified as a direct URL."""
return self.original_link is not None
@property
def is_pinned(self) -> bool:
"""Return whether I am pinned to an exact version.
For example, some-package==1.2 is pinned; some-package>1.2 is not.
"""
specifiers = self.specifier
assert self.req is not None
specifiers = self.req.specifier
return len(specifiers) == 1 and next(iter(specifiers)).operator in {"==", "==="}
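# Illustration (not from the diff): with the public 'packaging' package,
# SpecifierSet("==1.2") yields exactly one specifier whose .operator is "==",
# so it counts as pinned; SpecifierSet(">=1.2") (wrong operator) and
# SpecifierSet(">1.2,<2") (two specifiers) do not.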
def match_markers(self, extras_requested: Optional[Iterable[str]] = None) -> bool:
@ -260,7 +273,12 @@ class InstallRequirement:
extras_requested = ("",)
if self.markers is not None:
return any(
self.markers.evaluate({"extra": extra}) for extra in extras_requested
self.markers.evaluate({"extra": extra})
# TODO: Remove these two variants when packaging is upgraded to
# support the marker comparison logic specified in PEP 685.
or self.markers.evaluate({"extra": safe_extra(extra)})
or self.markers.evaluate({"extra": canonicalize_name(extra)})
for extra in extras_requested
)
else:
return True
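# Illustration (not from the diff): with the public 'packaging' package,
# Marker('extra == "socks"').evaluate({"extra": "socks"}) is True while
# .evaluate({"extra": "other"}) is False; the two extra variants above simply
# re-run the same check with normalized spellings of the extra.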
@ -293,11 +311,12 @@ class InstallRequirement:
good_hashes = self.hash_options.copy()
if trust_internet:
link = self.link
elif self.original_link and self.user_supplied:
elif self.is_direct and self.user_supplied:
link = self.original_link
else:
link = None
if link and link.hash:
assert link.hash_name is not None
good_hashes.setdefault(link.hash_name, []).append(link.hash)
return Hashes(good_hashes)
@ -307,6 +326,7 @@ class InstallRequirement:
return None
s = str(self.req)
if self.comes_from:
comes_from: Optional[str]
if isinstance(self.comes_from, str):
comes_from = self.comes_from
else:
@ -338,7 +358,7 @@ class InstallRequirement:
# When parallel builds are enabled, add a UUID to the build directory
# name so multiple builds do not interfere with each other.
dir_name: str = canonicalize_name(self.name)
dir_name: str = canonicalize_name(self.req.name)
if parallel_builds:
dir_name = f"{dir_name}_{uuid.uuid4().hex}"
@ -381,6 +401,7 @@ class InstallRequirement:
)
def warn_on_mismatching_name(self) -> None:
assert self.req is not None
metadata_name = canonicalize_name(self.metadata["Name"])
if canonicalize_name(self.req.name) == metadata_name:
# Everything is fine.
@ -450,6 +471,7 @@ class InstallRequirement:
# Things valid for sdists
@property
def unpacked_source_directory(self) -> str:
assert self.source_dir, f"No source dir for {self}"
return os.path.join(
self.source_dir, self.link and self.link.subdirectory_fragment or ""
)
@ -493,7 +515,7 @@ class InstallRequirement:
"to use --use-pep517 or add a "
"pyproject.toml file to the project"
),
gone_in="23.3",
gone_in="24.0",
)
self.use_pep517 = False
return
@ -536,7 +558,7 @@ class InstallRequirement:
Under PEP 517 and PEP 660, call the backend hook to prepare the metadata.
Under legacy processing, call setup.py egg-info.
"""
assert self.source_dir
assert self.source_dir, f"No source dir for {self}"
details = self.name or f"from {self.link}"
if self.use_pep517:
@ -585,8 +607,10 @@ class InstallRequirement:
if self.metadata_directory:
return get_directory_distribution(self.metadata_directory)
elif self.local_file_path and self.is_wheel:
assert self.req is not None
return get_wheel_distribution(
FilesystemWheel(self.local_file_path), canonicalize_name(self.name)
FilesystemWheel(self.local_file_path),
canonicalize_name(self.req.name),
)
raise AssertionError(
f"InstallRequirement {self} has no metadata directory and no wheel: "
@ -594,9 +618,9 @@ class InstallRequirement:
)
def assert_source_matches_version(self) -> None:
assert self.source_dir
assert self.source_dir, f"No source dir for {self}"
version = self.metadata["version"]
if self.req.specifier and version not in self.req.specifier:
if self.req and self.req.specifier and version not in self.req.specifier:
logger.warning(
"Requested %s, but installing version %s",
self,
@ -633,6 +657,27 @@ class InstallRequirement:
parallel_builds=parallel_builds,
)
def needs_unpacked_archive(self, archive_source: Path) -> None:
assert self._archive_source is None
self._archive_source = archive_source
def ensure_pristine_source_checkout(self) -> None:
"""Ensure the source directory has not yet been built in."""
assert self.source_dir is not None
if self._archive_source is not None:
unpack_file(str(self._archive_source), self.source_dir)
elif is_installable_dir(self.source_dir):
# If a checkout exists, it's unwise to keep going.
# Version inconsistencies are logged later, but do not fail
# the installation.
raise PreviousBuildDirError(
f"pip can't proceed with requirement '{self}' due to a "
f"pre-existing build directory ({self.source_dir}). This is likely "
"due to a previous installation that failed. pip is "
"being responsible and not assuming it can delete this. "
"Please delete it and try again."
)
# For editable installations
def update_editable(self) -> None:
if not self.link:
@ -690,9 +735,10 @@ class InstallRequirement:
name = name.replace(os.path.sep, "/")
return name
assert self.req is not None
path = os.path.join(parentdir, path)
name = _clean_zip_name(path, rootdir)
return self.name + "/" + name
return self.req.name + "/" + name
def archive(self, build_dir: Optional[str]) -> None:
"""Saves archive to provided build_dir.
@ -709,8 +755,8 @@ class InstallRequirement:
if os.path.exists(archive_path):
response = ask_path_exists(
"The file {} exists. (i)gnore, (w)ipe, "
"(b)ackup, (a)bort ".format(display_path(archive_path)),
f"The file {display_path(archive_path)} exists. (i)gnore, (w)ipe, "
"(b)ackup, (a)bort ",
("i", "w", "b", "a"),
)
if response == "i":
@ -771,8 +817,9 @@ class InstallRequirement:
use_user_site: bool = False,
pycompile: bool = True,
) -> None:
assert self.req is not None
scheme = get_scheme(
self.name,
self.req.name,
user=use_user_site,
home=home,
root=root,
@ -786,7 +833,7 @@ class InstallRequirement:
prefix=prefix,
home=home,
use_user_site=use_user_site,
name=self.name,
name=self.req.name,
setup_py_path=self.setup_py_path,
isolated=self.isolated,
build_env=self.build_env,
@ -799,13 +846,13 @@ class InstallRequirement:
assert self.local_file_path
install_wheel(
self.name,
self.req.name,
self.local_file_path,
scheme=scheme,
req_description=str(self.req),
pycompile=pycompile,
warn_script_location=warn_script_location,
direct_url=self.download_info if self.original_link else None,
direct_url=self.download_info if self.is_direct else None,
requested=self.user_supplied,
)
self.install_succeeded = True
@ -859,7 +906,7 @@ def check_legacy_setup_py_options(
reason="--build-option and --global-option are deprecated.",
issue=11859,
replacement="to use --config-settings",
gone_in="23.3",
gone_in="24.0",
)
logger.warning(
"Implying --no-binary=:all: due to the presence of "

src/pip/_internal/req/req_set.py

@ -2,9 +2,12 @@ import logging
from collections import OrderedDict
from typing import Dict, List
from pip._vendor.packaging.specifiers import LegacySpecifier
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.packaging.version import LegacyVersion
from pip._internal.req.req_install import InstallRequirement
from pip._internal.utils.deprecation import deprecated
logger = logging.getLogger(__name__)
@ -80,3 +83,37 @@ class RequirementSet:
for install_req in self.all_requirements
if not install_req.constraint and not install_req.satisfied_by
]
def warn_legacy_versions_and_specifiers(self) -> None:
for req in self.requirements_to_install:
version = req.get_dist().version
if isinstance(version, LegacyVersion):
deprecated(
reason=(
f"pip has selected the non standard version {version} "
f"of {req}. In the future this version will be "
f"ignored as it isn't standard compliant."
),
replacement=(
"set or update constraints to select another version "
"or contact the package author to fix the version number"
),
issue=12063,
gone_in="24.0",
)
for dep in req.get_dist().iter_dependencies():
if any(isinstance(spec, LegacySpecifier) for spec in dep.specifier):
deprecated(
reason=(
f"pip has selected {req} {version} which has non "
f"standard dependency specifier {dep}. "
f"In the future this version of {req} will be "
f"ignored as it isn't standard compliant."
),
replacement=(
"set or update constraints to select another version "
"or contact the package author to fix the version number"
),
issue=12063,
gone_in="24.0",
)
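# Illustration (not from the diff): with pip's vendored packaging (< 22.0),
# parse("1.0") returns a PEP 440 Version, while parse("latest") falls back to
# LegacyVersion('latest'); it is the latter kind of version (and specifiers
# built on such versions) that this deprecation warns about.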

src/pip/_internal/req/req_uninstall.py

@ -71,16 +71,16 @@ def uninstallation_paths(dist: BaseDistribution) -> Generator[str, None, None]:
entries = dist.iter_declared_entries()
if entries is None:
msg = "Cannot uninstall {dist}, RECORD file not found.".format(dist=dist)
msg = f"Cannot uninstall {dist}, RECORD file not found."
installer = dist.installer
if not installer or installer == "pip":
dep = "{}=={}".format(dist.raw_name, dist.version)
dep = f"{dist.raw_name}=={dist.version}"
msg += (
" You might be able to recover from this via: "
"'pip install --force-reinstall --no-deps {}'.".format(dep)
f"'pip install --force-reinstall --no-deps {dep}'."
)
else:
msg += " Hint: The package was installed by {}.".format(installer)
msg += f" Hint: The package was installed by {installer}."
raise UninstallationError(msg)
for entry in entries:
@ -274,7 +274,7 @@ class StashedUninstallPathSet:
def commit(self) -> None:
"""Commits the uninstall by removing stashed files."""
for _, save_dir in self._save_dirs.items():
for save_dir in self._save_dirs.values():
save_dir.cleanup()
self._moves = []
self._save_dirs = {}

src/pip/_internal/resolution/legacy/resolver.py

@ -231,9 +231,7 @@ class Resolver(BaseResolver):
tags = compatibility_tags.get_supported()
if requirement_set.check_supported_wheels and not wheel.supported(tags):
raise InstallationError(
"{} is not a supported wheel on this platform.".format(
wheel.filename
)
f"{wheel.filename} is not a supported wheel on this platform."
)
# This next bit is really a sanity check.
@ -287,9 +285,9 @@ class Resolver(BaseResolver):
)
if does_not_satisfy_constraint:
raise InstallationError(
"Could not satisfy constraints for '{}': "
f"Could not satisfy constraints for '{install_req.name}': "
"installation from path or url cannot be "
"constrained to a version".format(install_req.name)
"constrained to a version"
)
# If we're now installing a constraint, mark the existing
# object for real installation.
@ -398,9 +396,9 @@ class Resolver(BaseResolver):
# "UnicodeEncodeError: 'ascii' codec can't encode character"
# in Python 2 when the reason contains non-ascii characters.
"The candidate selected for download or install is a "
"yanked version: {candidate}\n"
"Reason for being yanked: {reason}"
).format(candidate=best_candidate, reason=reason)
f"yanked version: {best_candidate}\n"
f"Reason for being yanked: {reason}"
)
logger.warning(msg)
return link

src/pip/_internal/resolution/resolvelib/base.py

@ -1,7 +1,7 @@
from typing import FrozenSet, Iterable, Optional, Tuple, Union
from pip._vendor.packaging.specifiers import SpecifierSet
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
from pip._vendor.packaging.utils import NormalizedName
from pip._vendor.packaging.version import LegacyVersion, Version
from pip._internal.models.link import Link, links_equivalent
@ -12,11 +12,11 @@ CandidateLookup = Tuple[Optional["Candidate"], Optional[InstallRequirement]]
CandidateVersion = Union[LegacyVersion, Version]
def format_name(project: str, extras: FrozenSet[str]) -> str:
def format_name(project: NormalizedName, extras: FrozenSet[NormalizedName]) -> str:
if not extras:
return project
canonical_extras = sorted(canonicalize_name(e) for e in extras)
return "{}[{}]".format(project, ",".join(canonical_extras))
extras_expr = ",".join(sorted(extras))
return f"{project}[{extras_expr}]"
class Constraint:

src/pip/_internal/resolution/resolvelib/candidates.py

@ -159,10 +159,7 @@ class _InstallRequirementBackedCandidate(Candidate):
return f"{self.name} {self.version}"
def __repr__(self) -> str:
return "{class_name}({link!r})".format(
class_name=self.__class__.__name__,
link=str(self._link),
)
return f"{self.__class__.__name__}({str(self._link)!r})"
def __hash__(self) -> int:
return hash((self.__class__, self._link))
@ -240,7 +237,7 @@ class _InstallRequirementBackedCandidate(Candidate):
def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
requires = self.dist.iter_dependencies() if with_requires else ()
for r in requires:
yield self._factory.make_requirement_from_spec(str(r), self._ireq)
yield from self._factory.make_requirements_from_spec(str(r), self._ireq)
yield self._factory.make_requires_python_requirement(self.dist.requires_python)
def get_install_requirement(self) -> Optional[InstallRequirement]:
@ -341,6 +338,7 @@ class AlreadyInstalledCandidate(Candidate):
self.dist = dist
self._ireq = _make_install_req_from_dist(dist, template)
self._factory = factory
self._version = None
# This is just logging some messages, so we can do it eagerly.
# The returned dist would be exactly the same as self.dist because we
@ -353,10 +351,7 @@ class AlreadyInstalledCandidate(Candidate):
return str(self.dist)
def __repr__(self) -> str:
return "{class_name}({distribution!r})".format(
class_name=self.__class__.__name__,
distribution=self.dist,
)
return f"{self.__class__.__name__}({self.dist!r})"
def __hash__(self) -> int:
return hash((self.__class__, self.name, self.version))
@ -376,7 +371,9 @@ class AlreadyInstalledCandidate(Candidate):
@property
def version(self) -> CandidateVersion:
return self.dist.version
if self._version is None:
self._version = self.dist.version
return self._version
@property
def is_editable(self) -> bool:
@ -389,7 +386,7 @@ class AlreadyInstalledCandidate(Candidate):
if not with_requires:
return
for r in self.dist.iter_dependencies():
yield self._factory.make_requirement_from_spec(str(r), self._ireq)
yield from self._factory.make_requirements_from_spec(str(r), self._ireq)
def get_install_requirement(self) -> Optional[InstallRequirement]:
return None
@ -424,20 +421,35 @@ class ExtrasCandidate(Candidate):
self,
base: BaseCandidate,
extras: FrozenSet[str],
*,
comes_from: Optional[InstallRequirement] = None,
) -> None:
"""
:param comes_from: the InstallRequirement that led to this candidate if it
differs from the base's InstallRequirement. This will often be the
case in the sense that this candidate's requirement has the extras
while the base's does not. Unlike the InstallRequirement-backed
candidates, this requirement is used solely for reporting purposes;
it does not do any legwork.
"""
self.base = base
self.extras = extras
self.extras = frozenset(canonicalize_name(e) for e in extras)
# If any extras are requested in their non-normalized forms, keep track
# of their raw values. This is needed when we look up dependencies
# since PEP 685 has not been implemented for marker-matching, and using
# the non-normalized extra for lookup ensures that an extra a package
# declares in a non-normalized form can still be selected by the user.
# TODO: Remove this attribute when packaging is upgraded to support the
# marker comparison logic specified in PEP 685.
self._unnormalized_extras = extras.difference(self.extras)
self._comes_from = comes_from if comes_from is not None else self.base._ireq
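# Illustration (not from the diff): canonicalize_name("Foo_Bar") == "foo-bar",
# so extras={"Foo_Bar"} normalizes to {"foo-bar"} while the raw spelling is
# retained in _unnormalized_extras for pre-PEP 685 marker matching.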
def __str__(self) -> str:
name, rest = str(self.base).split(" ", 1)
return "{}[{}] {}".format(name, ",".join(self.extras), rest)
def __repr__(self) -> str:
return "{class_name}(base={base!r}, extras={extras!r})".format(
class_name=self.__class__.__name__,
base=self.base,
extras=self.extras,
)
return f"{self.__class__.__name__}(base={self.base!r}, extras={self.extras!r})"
def __hash__(self) -> int:
return hash((self.base, self.extras))
@ -477,6 +489,50 @@ class ExtrasCandidate(Candidate):
def source_link(self) -> Optional[Link]:
return self.base.source_link
def _warn_invalid_extras(
self,
requested: FrozenSet[str],
valid: FrozenSet[str],
) -> None:
"""Emit warnings for invalid extras being requested.
This emits a warning for each requested extra that is not in the
candidate's ``Provides-Extra`` list.
"""
invalid_extras_to_warn = frozenset(
extra
for extra in requested
if extra not in valid
# If an extra is requested in an unnormalized form, skip warning
# about the normalized form being missing.
and extra in self.extras
)
if not invalid_extras_to_warn:
return
for extra in sorted(invalid_extras_to_warn):
logger.warning(
"%s %s does not provide the extra '%s'",
self.base.name,
self.version,
extra,
)
def _calculate_valid_requested_extras(self) -> FrozenSet[str]:
"""Get a list of valid extras requested by this candidate.
The user (or upstream dependant) may have specified extras that the
candidate doesn't support. Any unsupported extras are dropped, and each
cause a warning to be logged here.
"""
requested_extras = self.extras.union(self._unnormalized_extras)
valid_extras = frozenset(
extra
for extra in requested_extras
if self.base.dist.is_extra_provided(extra)
)
self._warn_invalid_extras(requested_extras, valid_extras)
return valid_extras
def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
factory = self.base._factory
@ -486,24 +542,13 @@ class ExtrasCandidate(Candidate):
if not with_requires:
return
# The user may have specified extras that the candidate doesn't
# support. We ignore any unsupported extras here.
valid_extras = self.extras.intersection(self.base.dist.iter_provided_extras())
invalid_extras = self.extras.difference(self.base.dist.iter_provided_extras())
for extra in sorted(invalid_extras):
logger.warning(
"%s %s does not provide the extra '%s'",
self.base.name,
self.version,
extra,
)
valid_extras = self._calculate_valid_requested_extras()
for r in self.base.dist.iter_dependencies(valid_extras):
requirement = factory.make_requirement_from_spec(
str(r), self.base._ireq, valid_extras
yield from factory.make_requirements_from_spec(
str(r),
self._comes_from,
valid_extras,
)
if requirement:
yield requirement
def get_install_requirement(self) -> Optional[InstallRequirement]:
# We don't return anything here, because we always

src/pip/_internal/resolution/resolvelib/factory.py

@ -62,6 +62,7 @@ from .requirements import (
ExplicitRequirement,
RequiresPythonRequirement,
SpecifierRequirement,
SpecifierWithoutExtrasRequirement,
UnsatisfiableRequirement,
)
@ -112,7 +113,7 @@ class Factory:
self._editable_candidate_cache: Cache[EditableCandidate] = {}
self._installed_candidate_cache: Dict[str, AlreadyInstalledCandidate] = {}
self._extras_candidate_cache: Dict[
Tuple[int, FrozenSet[str]], ExtrasCandidate
Tuple[int, FrozenSet[NormalizedName]], ExtrasCandidate
] = {}
if not ignore_installed:
@ -132,19 +133,23 @@ class Factory:
if not link.is_wheel:
return
wheel = Wheel(link.filename)
if wheel.supported(self._finder.target_python.get_tags()):
if wheel.supported(self._finder.target_python.get_unsorted_tags()):
return
msg = f"{link.filename} is not a supported wheel on this platform."
raise UnsupportedWheel(msg)
def _make_extras_candidate(
self, base: BaseCandidate, extras: FrozenSet[str]
self,
base: BaseCandidate,
extras: FrozenSet[str],
*,
comes_from: Optional[InstallRequirement] = None,
) -> ExtrasCandidate:
cache_key = (id(base), extras)
cache_key = (id(base), frozenset(canonicalize_name(e) for e in extras))
try:
candidate = self._extras_candidate_cache[cache_key]
except KeyError:
candidate = ExtrasCandidate(base, extras)
candidate = ExtrasCandidate(base, extras, comes_from=comes_from)
self._extras_candidate_cache[cache_key] = candidate
return candidate
@ -161,7 +166,7 @@ class Factory:
self._installed_candidate_cache[dist.canonical_name] = base
if not extras:
return base
return self._make_extras_candidate(base, extras)
return self._make_extras_candidate(base, extras, comes_from=template)
def _make_candidate_from_link(
self,
@ -223,7 +228,7 @@ class Factory:
if not extras:
return base
return self._make_extras_candidate(base, extras)
return self._make_extras_candidate(base, extras, comes_from=template)
def _iter_found_candidates(
self,
@ -385,16 +390,21 @@ class Factory:
if ireq is not None:
ireqs.append(ireq)
# If the current identifier contains extras, add explicit candidates
# from entries from extra-less identifier.
# If the current identifier contains extras, add requires and explicit
# candidates from entries from extra-less identifier.
with contextlib.suppress(InvalidRequirement):
parsed_requirement = get_requirement(identifier)
explicit_candidates.update(
self._iter_explicit_candidates_from_base(
requirements.get(parsed_requirement.name, ()),
frozenset(parsed_requirement.extras),
),
)
if parsed_requirement.name != identifier:
explicit_candidates.update(
self._iter_explicit_candidates_from_base(
requirements.get(parsed_requirement.name, ()),
frozenset(parsed_requirement.extras),
),
)
for req in requirements.get(parsed_requirement.name, []):
_, ireq = req.get_candidate_lookup()
if ireq is not None:
ireqs.append(ireq)
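# Illustration (not from the diff): for an identifier like "pkg[foo,bar]",
# get_requirement() returns a parsed requirement with .name == "pkg" and
# .extras == {"foo", "bar"}, so the lookups above also consult the
# extra-less "pkg" entries.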
# Add explicit candidates from constraints. We only do this if there are
# known ireqs, which represent requirements not already explicit. If
@ -437,37 +447,49 @@ class Factory:
and all(req.is_satisfied_by(c) for req in requirements[identifier])
)
def _make_requirement_from_install_req(
def _make_requirements_from_install_req(
self, ireq: InstallRequirement, requested_extras: Iterable[str]
) -> Optional[Requirement]:
) -> Iterator[Requirement]:
"""
Returns requirement objects associated with the given InstallRequirement. In
most cases this will be a single object, but the following special cases exist:
- the InstallRequirement has markers that do not apply -> result is empty
- the InstallRequirement has both a constraint and extras -> result is split
into two requirement objects: one with the constraint and one with the
extras. This allows centralized constraint handling for the base,
resulting in fewer candidate rejections.
"""
if not ireq.match_markers(requested_extras):
logger.info(
"Ignoring %s: markers '%s' don't match your environment",
ireq.name,
ireq.markers,
)
return None
if not ireq.link:
return SpecifierRequirement(ireq)
self._fail_if_link_is_unsupported_wheel(ireq.link)
cand = self._make_candidate_from_link(
ireq.link,
extras=frozenset(ireq.extras),
template=ireq,
name=canonicalize_name(ireq.name) if ireq.name else None,
version=None,
)
if cand is None:
# There's no way we can satisfy a URL requirement if the underlying
# candidate fails to build. An unnamed URL must be user-supplied, so
# we fail eagerly. If the URL is named, an unsatisfiable requirement
# can make the resolver do the right thing, either backtrack (and
# maybe find some other requirement that's buildable) or raise a
# ResolutionImpossible eventually.
if not ireq.name:
raise self._build_failures[ireq.link]
return UnsatisfiableRequirement(canonicalize_name(ireq.name))
return self.make_requirement_from_candidate(cand)
elif not ireq.link:
if ireq.extras and ireq.req is not None and ireq.req.specifier:
yield SpecifierWithoutExtrasRequirement(ireq)
yield SpecifierRequirement(ireq)
else:
self._fail_if_link_is_unsupported_wheel(ireq.link)
cand = self._make_candidate_from_link(
ireq.link,
extras=frozenset(ireq.extras),
template=ireq,
name=canonicalize_name(ireq.name) if ireq.name else None,
version=None,
)
if cand is None:
# There's no way we can satisfy a URL requirement if the underlying
# candidate fails to build. An unnamed URL must be user-supplied, so
# we fail eagerly. If the URL is named, an unsatisfiable requirement
# can make the resolver do the right thing, either backtrack (and
# maybe find some other requirement that's buildable) or raise a
# ResolutionImpossible eventually.
if not ireq.name:
raise self._build_failures[ireq.link]
yield UnsatisfiableRequirement(canonicalize_name(ireq.name))
else:
yield self.make_requirement_from_candidate(cand)
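# Illustration (not from the diff): given an ireq "pkg[extra]>=1.0" (no link),
# the generator above yields two objects,
#   SpecifierWithoutExtrasRequirement("pkg>=1.0")   - constrains the base
#   SpecifierRequirement("pkg[extra]>=1.0")         - pulls in the extra
# so the base constraint is handled once rather than per extras-variant.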
def collect_root_requirements(
self, root_ireqs: List[InstallRequirement]
@ -488,15 +510,27 @@ class Factory:
else:
collected.constraints[name] = Constraint.from_ireq(ireq)
else:
req = self._make_requirement_from_install_req(
ireq,
requested_extras=(),
reqs = list(
self._make_requirements_from_install_req(
ireq,
requested_extras=(),
)
)
if req is None:
if not reqs:
continue
if ireq.user_supplied and req.name not in collected.user_requested:
collected.user_requested[req.name] = i
collected.requirements.append(req)
template = reqs[0]
if ireq.user_supplied and template.name not in collected.user_requested:
collected.user_requested[template.name] = i
collected.requirements.extend(reqs)
# Put requirements with extras at the end of the root requires. This does not
# affect resolvelib's picking preference but it does affect its initial criteria
# population: by putting extras at the end we enable the candidate finder to
# present resolvelib with a smaller set of candidates, already
# taking into account any non-transient constraints on the associated base. This
# means resolvelib will have fewer candidates to visit and reject.
# Python's list sort is stable, meaning relative order is kept for objects with
# the same key.
collected.requirements.sort(key=lambda r: r.name != r.project_name)
return collected
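# Illustration (not from the diff): the binary key above maps base
# requirements to False and extras-variants to True, and Python's stable sort
# keeps False before True without reordering within either group, e.g.
#   sorted(["a[x]", "b", "c[y]", "d"], key=lambda n: "[" in n)
#   -> ["b", "d", "a[x]", "c[y]"]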
def make_requirement_from_candidate(
@ -504,14 +538,23 @@ class Factory:
) -> ExplicitRequirement:
return ExplicitRequirement(candidate)
def make_requirement_from_spec(
def make_requirements_from_spec(
self,
specifier: str,
comes_from: Optional[InstallRequirement],
requested_extras: Iterable[str] = (),
) -> Optional[Requirement]:
) -> Iterator[Requirement]:
"""
Returns requirement objects associated with the given specifier. In most cases
this will be a single object, but the following special cases exist:
- the specifier has markers that do not apply -> result is empty
- the specifier has both a constraint and extras -> result is split
into two requirement objects: one with the constraint and one with the
extras. This allows centralized constraint handling for the base,
resulting in fewer candidate rejections.
"""
ireq = self._make_install_req_from_spec(specifier, comes_from)
return self._make_requirement_from_install_req(ireq, requested_extras)
return self._make_requirements_from_install_req(ireq, requested_extras)
def make_requires_python_requirement(
self,
@ -603,8 +646,26 @@ class Factory:
cands = self._finder.find_all_candidates(req.project_name)
skipped_by_requires_python = self._finder.requires_python_skipped_reasons()
versions = [str(v) for v in sorted({c.version for c in cands})]
versions_set: Set[CandidateVersion] = set()
yanked_versions_set: Set[CandidateVersion] = set()
for c in cands:
is_yanked = c.link.is_yanked if c.link else False
if is_yanked:
yanked_versions_set.add(c.version)
else:
versions_set.add(c.version)
versions = [str(v) for v in sorted(versions_set)]
yanked_versions = [str(v) for v in sorted(yanked_versions_set)]
if yanked_versions:
# Saying "version X is yanked" isn't entirely accurate.
# https://github.com/pypa/pip/issues/11745#issuecomment-1402805842
logger.critical(
"Ignored the following yanked versions: %s",
", ".join(yanked_versions) or "none",
)
if skipped_by_requires_python:
logger.critical(
"Ignored the following versions that require a different python "
@ -692,8 +753,8 @@ class Factory:
info = "the requested packages"
msg = (
"Cannot install {} because these package versions "
"have conflicting dependencies.".format(info)
f"Cannot install {info} because these package versions "
"have conflicting dependencies."
)
logger.critical(msg)
msg = "\nThe conflict is caused by:"

src/pip/_internal/resolution/resolvelib/reporter.py

@ -20,7 +20,7 @@ class PipReporter(BaseReporter):
"requirements. This could take a while."
),
8: (
"pip is looking at multiple versions of {package_name} to "
"pip is still looking at multiple versions of {package_name} to "
"determine which version is compatible with other "
"requirements. This could take a while."
),

src/pip/_internal/resolution/resolvelib/requirements.py

@ -1,6 +1,7 @@
from pip._vendor.packaging.specifiers import SpecifierSet
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
from pip._internal.req.constructors import install_req_drop_extras
from pip._internal.req.req_install import InstallRequirement
from .base import Candidate, CandidateLookup, Requirement, format_name
@ -14,10 +15,7 @@ class ExplicitRequirement(Requirement):
return str(self.candidate)
def __repr__(self) -> str:
return "{class_name}({candidate!r})".format(
class_name=self.__class__.__name__,
candidate=self.candidate,
)
return f"{self.__class__.__name__}({self.candidate!r})"
@property
def project_name(self) -> NormalizedName:
@ -43,16 +41,13 @@ class SpecifierRequirement(Requirement):
def __init__(self, ireq: InstallRequirement) -> None:
assert ireq.link is None, "This is a link, not a specifier"
self._ireq = ireq
self._extras = frozenset(ireq.extras)
self._extras = frozenset(canonicalize_name(e) for e in self._ireq.extras)
def __str__(self) -> str:
return str(self._ireq.req)
def __repr__(self) -> str:
return "{class_name}({requirement!r})".format(
class_name=self.__class__.__name__,
requirement=str(self._ireq.req),
)
return f"{self.__class__.__name__}({str(self._ireq.req)!r})"
@property
def project_name(self) -> NormalizedName:
@ -92,6 +87,18 @@ class SpecifierRequirement(Requirement):
return spec.contains(candidate.version, prereleases=True)
class SpecifierWithoutExtrasRequirement(SpecifierRequirement):
"""
Requirement backed by an install requirement on a base package.
Trims extras from its install requirement if there are any.
"""
def __init__(self, ireq: InstallRequirement) -> None:
assert ireq.link is None, "This is a link, not a specifier"
self._ireq = install_req_drop_extras(ireq)
self._extras = frozenset(canonicalize_name(e) for e in self._ireq.extras)
class RequiresPythonRequirement(Requirement):
"""A requirement representing Requires-Python metadata."""
@ -103,10 +110,7 @@ class RequiresPythonRequirement(Requirement):
return f"Python {self.specifier}"
def __repr__(self) -> str:
return "{class_name}({specifier!r})".format(
class_name=self.__class__.__name__,
specifier=str(self.specifier),
)
return f"{self.__class__.__name__}({str(self.specifier)!r})"
@property
def project_name(self) -> NormalizedName:
@ -142,10 +146,7 @@ class UnsatisfiableRequirement(Requirement):
return f"{self._name} (unavailable)"
def __repr__(self) -> str:
return "{class_name}({name!r})".format(
class_name=self.__class__.__name__,
name=str(self._name),
)
return f"{self.__class__.__name__}({str(self._name)!r})"
@property
def project_name(self) -> NormalizedName:

src/pip/_internal/resolution/resolvelib/resolver.py

@ -1,3 +1,4 @@
import contextlib
import functools
import logging
import os
@ -11,6 +12,7 @@ from pip._vendor.resolvelib.structs import DirectedGraph
from pip._internal.cache import WheelCache
from pip._internal.index.package_finder import PackageFinder
from pip._internal.operations.prepare import RequirementPreparer
from pip._internal.req.constructors import install_req_extend_extras
from pip._internal.req.req_install import InstallRequirement
from pip._internal.req.req_set import RequirementSet
from pip._internal.resolution.base import BaseResolver, InstallRequirementProvider
@ -19,6 +21,7 @@ from pip._internal.resolution.resolvelib.reporter import (
PipDebuggingReporter,
PipReporter,
)
from pip._internal.utils.packaging import get_requirement
from .base import Candidate, Requirement
from .factory import Factory
@ -101,9 +104,24 @@ class Resolver(BaseResolver):
raise error from e
req_set = RequirementSet(check_supported_wheels=check_supported_wheels)
for candidate in result.mapping.values():
# Process candidates with extras last to ensure their base equivalent is
# already in the req_set if appropriate.
# Python's sort is stable, so using a binary key function keeps relative order
# within both subsets.
for candidate in sorted(
result.mapping.values(), key=lambda c: c.name != c.project_name
):
ireq = candidate.get_install_requirement()
if ireq is None:
if candidate.name != candidate.project_name:
# extend existing req's extras
with contextlib.suppress(KeyError):
req = req_set.get_requirement(candidate.project_name)
req_set.add_named_requirement(
install_req_extend_extras(
req, get_requirement(candidate.name).extras
)
)
continue
# Check if there is already an installation under the same name,
@ -159,6 +177,9 @@ class Resolver(BaseResolver):
reqs = req_set.all_requirements
self.factory.preparer.prepare_linked_requirements_more(reqs)
for req in reqs:
req.prepared = True
req.needs_more_preparation = False
return req_set
def get_installation_order(

src/pip/_internal/self_outdated_check.py

@ -28,8 +28,7 @@ from pip._internal.utils.entrypoints import (
from pip._internal.utils.filesystem import adjacent_tmp_file, check_path_owner, replace
from pip._internal.utils.misc import ensure_dir
_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ"
_WEEK = datetime.timedelta(days=7)
logger = logging.getLogger(__name__)
@ -40,6 +39,15 @@ def _get_statefile_name(key: str) -> str:
return name
def _convert_date(isodate: str) -> datetime.datetime:
"""Convert an ISO format string to a date.
Handles the format 2020-01-22T14:24:01Z (trailing Z)
which is not supported by older versions of fromisoformat.
"""
return datetime.datetime.fromisoformat(isodate.replace("Z", "+00:00"))
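# Illustrative sketch (not from the diff): on Python versions where
# fromisoformat() rejects a trailing "Z" (before 3.11), mapping it to an
# explicit UTC offset first makes the parse succeed.
import datetime
parsed = datetime.datetime.fromisoformat("2020-01-22T14:24:01Z".replace("Z", "+00:00"))
assert parsed.tzinfo == datetime.timezone.utc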
class SelfCheckState:
def __init__(self, cache_dir: str) -> None:
self._state: Dict[str, Any] = {}
@ -73,12 +81,10 @@ class SelfCheckState:
if "pypi_version" not in self._state:
return None
seven_days_in_seconds = 7 * 24 * 60 * 60
# Determine if we need to refresh the state
last_check = datetime.datetime.strptime(self._state["last_check"], _DATE_FMT)
seconds_since_last_check = (current_time - last_check).total_seconds()
if seconds_since_last_check > seven_days_in_seconds:
last_check = _convert_date(self._state["last_check"])
time_since_last_check = current_time - last_check
if time_since_last_check > _WEEK:
return None
return self._state["pypi_version"]
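# Illustration (not from the diff): comparing timedeltas directly replaces the
# manual seconds arithmetic, e.g. with now = datetime.datetime.now(
# datetime.timezone.utc), (now - last_check) > _WEEK is True once last_check
# is more than seven days old (both datetimes must be timezone-aware).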
@ -100,7 +106,7 @@ class SelfCheckState:
# Include the key so it's easy to tell which pip wrote the
# file.
"key": self.key,
"last_check": current_time.strftime(_DATE_FMT),
"last_check": current_time.isoformat(),
"pypi_version": pypi_version,
}
@ -229,14 +235,14 @@ def pip_self_version_check(session: PipSession, options: optparse.Values) -> Non
try:
upgrade_prompt = _self_version_check_logic(
state=SelfCheckState(cache_dir=options.cache_dir),
current_time=datetime.datetime.utcnow(),
current_time=datetime.datetime.now(datetime.timezone.utc),
local_version=installed_dist.version,
get_remote_version=functools.partial(
_get_current_remote_pip_version, session, options
),
)
if upgrade_prompt is not None:
logger.warning("[present-rich] %s", upgrade_prompt)
logger.warning("%s", upgrade_prompt, extra={"rich": True})
except Exception:
logger.warning("There was an error checking the latest version of pip.")
logger.debug("See below for error", exc_info=True)

src/pip/_internal/utils/glibc.py

@ -1,6 +1,3 @@
# The following comment should be removed at some point in the future.
# mypy: strict-optional=False
import os
import sys
from typing import Optional, Tuple
@ -20,8 +17,11 @@ def glibc_version_string_confstr() -> Optional[str]:
if sys.platform == "win32":
return None
try:
gnu_libc_version = os.confstr("CS_GNU_LIBC_VERSION")
if gnu_libc_version is None:
return None
# os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17":
_, version = os.confstr("CS_GNU_LIBC_VERSION").split()
_, version = gnu_libc_version.split()
except (AttributeError, OSError, ValueError):
# os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
return None
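# Illustrative sketch (not from the diff): the same defensive parse as a
# standalone snippet; the helper name glibc_version is hypothetical. confstr
# can be missing (AttributeError), fail (OSError), return None, or return a
# malformed value (ValueError on unpacking).
import os
from typing import Optional

def glibc_version() -> Optional[str]:
    try:
        value = os.confstr("CS_GNU_LIBC_VERSION")  # e.g. "glibc 2.17" on Linux
        if value is None:
            return None
        _, version = value.split()
    except (AttributeError, OSError, ValueError):
        return None
    return version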

Some files were not shown because too many files have changed in this diff.