Mirror of https://github.com/pypa/pip (synced 2023-12-13 21:30:23 +01:00)

Commit 2fad07e6e2: Merge branch 'main' into no-import-from-conftest
250 changed files with 7964 additions and 3168 deletions
.git-blame-ignore-revs (new file, 36 lines)

@@ -0,0 +1,36 @@
+917b41d6d73535c090fc312668dff353cdaef906 # Blacken docs/html/conf.py
+ed383dd8afa8fe0250dcf9b8962927ada0e21c89 # Blacken docs/pip_sphinxext.py
+228405e62451abe8a66233573035007df4be575f # Blacken noxfile.py
+f477a9f490e978177b71c9dbaa5465c51ea21129 # Blacken setup.py
+e59ba23468390217479465019f8d78e724a23550 # Blacken src/pip/__main__.py
+d7013db084e9a52242354ee5754dc5d19ccf062e # Blacken src/pip/_internal/build_env.py
+30e9ffacae75378fc3e3df48f754dabad037edb9 # Blacken src/pip/_internal/cache.py
+8341d56b46776a805286218ac5fb0e7850fd9341 # Blacken src/pip/_internal/cli/autocompletion.py
+3d3461ed65208656358b3595e25d8c31c5c89470 # Blacken src/pip/_internal/cli/base_command.py
+d489b0f1b104bc936b0fb17e6c33633664ebdc0e # Blacken src/pip/_internal/cli/cmdoptions.py
+591fe4841aefe9befa0530f2a54f820c4ecbb392 # Blacken src/pip/_internal/cli/command_context.py
+9265b28ef7248ae1847a80384dbeeb8119c3e2f5 # Blacken src/pip/_internal/cli/main.py
+847a369364878c38d210c90beed2737bb6fb3a85 # Blacken src/pip/_internal/cli/main_parser.py
+ec97119067041ae58b963935ff5f0e5d9fead80c # Blacken src/pip/_internal/cli/parser.py
+6e3b8de22fa39fa3073599ecf9db61367f4b3b32 # Blacken src/pip/_internal/cli/progress_bars.py
+55405227de983c5bd5bf0858ea12dbe537d3e490 # Blacken src/pip/_internal/cli/req_command.py
+d5ca5c850cae9a0c64882a8f49d3a318699a7e2e # Blacken src/pip/_internal/cli/spinners.py
+9747cb48f8430a7a91b36fe697dd18dbddb319f0 # Blacken src/pip/_internal/commands/__init__.py
+1c09fd6f124df08ca36bed68085ad68e89bb1957 # Blacken src/pip/_internal/commands/cache.py
+315e93d7eb87cd476afcc4eaf0f01a7b56a5037f # Blacken src/pip/_internal/commands/check.py
+8ae3b96ed7d24fd24024ccce4840da0dcf635f26 # Blacken src/pip/_internal/commands/completion.py
+42ca4792202f26a293ee48380718743a80bbee37 # Blacken src/pip/_internal/commands/configuration.py
+790ad78fcd43d41a5bef9dca34a3c128d05eb02c # Blacken src/pip/_internal/commands/debug.py
+a6fcc8f045afe257ce321f4012fc8fcb4be01eb3 # Blacken src/pip/_internal/commands/download.py
+920e735dfc60109351fbe2f4c483c2f6ede9e52d # Blacken src/pip/_internal/commands/freeze.py
+053004e0fcf0851238b1064fbce13aea87b24e9c # Blacken src/pip/_internal/commands/hash.py
+a6b6ae487e52c2242045b64cb8962e0a992cfd76 # Blacken src/pip/_internal/commands/help.py
+2495cf95a6c7eb61ccf1f9f0e8b8d736af914e53 # Blacken __main__.py
+c7ee560e00b85f7486b452c14ff49e4737996eda # Blacken tools/
+8e2e1964a4f0a060f7299a96a911c9e116b2283d # Blacken src/pip/_internal/commands/
+1bc0eef05679e87f45540ab0a294667cb3c6a88e # Blacken src/pip/_internal/network/
+069b01932a7d64a81c708c6254cc93e1f89e6783 # Blacken src/pip/_internal/req
+1897784d59e0d5fcda2dd75fea54ddd8be3d502a # Blacken src/pip/_internal/index
+94999255d5ede440c37137d210666fdf64302e75 # Reformat the codebase, with black
+585037a80a1177f1fa92e159a7079855782e543e # Cleanup implicit string concatenation
+8a6f6ac19b80a6dc35900a47016c851d9fcd2ee2 # Blacken src/pip/_internal/resolution directory
.github/workflows/ci.yml (35 lines changed)

@@ -91,7 +91,7 @@ jobs:
- run: git diff --exit-code

tests-unix:
-name: tests / ${{ matrix.python }} / ${{ matrix.os }}
+name: tests / ${{ matrix.python.key || matrix.python }} / ${{ matrix.os }}
runs-on: ${{ matrix.os }}-latest

needs: [packaging, determine-changes]

@@ -109,12 +109,14 @@ jobs:
- "3.9"
- "3.10"
- "3.11"
+- "3.12"

steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python }}
+allow-prereleases: true

- name: Install Ubuntu dependencies
if: matrix.os == 'Ubuntu'

@@ -129,12 +131,12 @@ jobs:
# Main check
- name: Run unit tests
run: >-
-nox -s test-${{ matrix.python }} --
+nox -s test-${{ matrix.python.key || matrix.python }} --
-m unit
--verbose --numprocesses auto --showlocals
- name: Run integration tests
run: >-
-nox -s test-${{ matrix.python }} --
+nox -s test-${{ matrix.python.key || matrix.python }} --
-m integration
--verbose --numprocesses auto --showlocals
--durations=5

@@ -167,24 +169,13 @@ jobs:
with:
python-version: ${{ matrix.python }}

-# We use a RAMDisk on Windows, since filesystem IO is a big slowdown
-# for our tests.
-- name: Create a RAMDisk
-run: ./tools/ci/New-RAMDisk.ps1 -Drive R -Size 1GB

-- name: Setup RAMDisk permissions
-run: |
-mkdir R:\Temp
-$acl = Get-Acl "R:\Temp"
-$rule = New-Object System.Security.AccessControl.FileSystemAccessRule(
-"Everyone", "FullControl", "ContainerInherit,ObjectInherit", "None", "Allow"
-)
-$acl.AddAccessRule($rule)
-Set-Acl "R:\Temp" $acl

+# We use C:\Temp (which is already available on the worker)
+# as a temporary directory for all of the tests because the
+# default value (under the user dir) is more deeply nested
+# and causes tests to fail with "path too long" errors.
- run: pip install nox
env:
-TEMP: "R:\\Temp"
+TEMP: "C:\\Temp"

# Main check
- name: Run unit tests

@@ -194,7 +185,7 @@ jobs:
-m unit
--verbose --numprocesses auto --showlocals
env:
-TEMP: "R:\\Temp"
+TEMP: "C:\\Temp"

- name: Run integration tests (group 1)
if: matrix.group == 1

@@ -203,7 +194,7 @@ jobs:
-m integration -k "not test_install"
--verbose --numprocesses auto --showlocals
env:
-TEMP: "R:\\Temp"
+TEMP: "C:\\Temp"

- name: Run integration tests (group 2)
if: matrix.group == 2

@@ -212,7 +203,7 @@ jobs:
-m integration -k "test_install"
--verbose --numprocesses auto --showlocals
env:
-TEMP: "R:\\Temp"
+TEMP: "C:\\Temp"

tests-zipapp:
name: tests / zipapp
.github/workflows/no-response.yml (deleted, 19 lines)

@@ -1,19 +0,0 @@
-name: No Response
-
-# Both `issue_comment` and `scheduled` event types are required for this Action
-# to work properly.
-on:
-issue_comment:
-types: [created]
-schedule:
-# Schedule for five minutes after the hour, every hour
-- cron: '5 * * * *'
-
-jobs:
-noResponse:
-runs-on: ubuntu-latest
-steps:
-- uses: lee-dohm/no-response@v0.5.0
-with:
-token: ${{ github.token }}
-responseRequiredLabel: "S: awaiting response"
@@ -17,26 +17,15 @@ repos:
exclude: .patch

- repo: https://github.com/psf/black
-rev: 23.1.0
+rev: 23.7.0
hooks:
- id: black

- repo: https://github.com/PyCQA/flake8
rev: 6.0.0
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.0.287
hooks:
- id: flake8
additional_dependencies: [
'flake8-bugbear',
'flake8-logging-format',
'flake8-implicit-str-concat',
]
exclude: tests/data

- repo: https://github.com/PyCQA/isort
rev: 5.12.0
hooks:
- id: isort
files: \.py$
- id: ruff

- repo: https://github.com/pre-commit/mirrors-mypy
rev: v0.961
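The hook configuration above swaps flake8 and isort for Ruff. As a quick, non-authoritative sketch of exercising these hooks locally (assuming the `pre-commit` tool itself is installed):

```
$ pre-commit run --all-files       # run every configured hook against the whole tree
$ pre-commit run ruff --all-files  # run only the new Ruff hook while iterating on a fix
```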
@@ -6,7 +6,7 @@ build:
python: "3.11"

sphinx:
-builder: htmldir
+builder: dirhtml
configuration: docs/html/conf.py

python:
AUTHORS.txt (10 lines changed)

@@ -71,6 +71,7 @@ atse
Atsushi Odagiri
Avinash Karhana
Avner Cohen
Awit (Ah-Wit) Ghirmai
Baptiste Mispelon
Barney Gale
barneygale

@@ -126,6 +127,7 @@ Chih-Hsuan Yen
Chris Brinker
Chris Hunt
Chris Jerdonek
Chris Kuehl
Chris McDonough
Chris Pawley
Chris Pryer

@@ -330,6 +332,8 @@ Jarek Potiuk
jarondl
Jason Curtis
Jason R. Coombs
JasonMo
JasonMo1
Jay Graves
Jean-Christophe Fillion-Robin
Jeff Barber

@@ -344,6 +348,7 @@ Jim Fisher
Jim Garrison
Jiun Bae
Jivan Amara
Joe Bylund
Joe Michelini
John Paton
John T. Wodder II

@@ -441,6 +446,7 @@ Matthew Einhorn
Matthew Feickert
Matthew Gilliard
Matthew Iversen
Matthew Treinish
Matthew Trumbell
Matthew Willson
Matthias Bussonnier

@@ -582,6 +588,7 @@ Rishi
RobberPhex
Robert Collins
Robert McGibbon
Robert Pollak
Robert T. McGibbon
robin elisha robinson
Roey Berman

@@ -614,6 +621,7 @@ SeongSoo Cho
Sergey Vasilyev
Seth Michael Larson
Seth Woodworth
Shantanu
shireenrao
Shivansh-007
Shlomi Fish

@@ -638,6 +646,7 @@ Steve Barnes
Steve Dower
Steve Kowalik
Steven Myint
Steven Silvester
stonebig
Stéphane Bidoul
Stéphane Bidoul (ACSONE)

@@ -707,6 +716,7 @@ Wilson Mo
wim glenn
Winson Luk
Wolfgang Maier
Wu Zhenyu
XAMES3
Xavier Fernandez
xoviat
@@ -14,6 +14,7 @@ recursive-include src/pip/_vendor *COPYING*
include docs/docutils.conf
include docs/requirements.txt

+exclude .git-blame-ignore-revs
exclude .coveragerc
exclude .mailmap
exclude .appveyor.yml
NEWS.rst (65 lines changed)

@@ -9,6 +9,69 @@

.. towncrier release notes start

23.2.1 (2023-07-22)
===================

Bug Fixes
---------

- Disable PEP 658 metadata fetching with the legacy resolver. (`#12156 <https://github.com/pypa/pip/issues/12156>`_)


23.2 (2023-07-15)
=================

Process
-------

- Deprecate support for eggs for Python 3.11 or later, when the new ``importlib.metadata`` backend is used to load distribution metadata. This only affects the egg *distribution format* (with the ``.egg`` extension); distributions using the ``.egg-info`` *metadata format* (but are not actually eggs) are not affected. For more information about eggs, see `relevant section in the setuptools documentation <https://setuptools.pypa.io/en/stable/deprecated/python_eggs.html>`__.

Deprecations and Removals
-------------------------

- Deprecate legacy version and version specifiers that don't conform to `PEP 440
<https://peps.python.org/pep-0440/>`_ (`#12063 <https://github.com/pypa/pip/issues/12063>`_)
- ``freeze`` no longer excludes the ``setuptools``, ``distribute``, and ``wheel``
from the output when running on Python 3.12 or later, where they are not
included in a virtual environment by default. Use ``--exclude`` if you wish to
exclude any of these packages. (`#4256 <https://github.com/pypa/pip/issues/4256>`_)

Features
--------

- make rejection messages slightly different between 1 and 8, so the user can make the difference. (`#12040 <https://github.com/pypa/pip/issues/12040>`_)

Bug Fixes
---------

- Fix ``pip completion --zsh``. (`#11417 <https://github.com/pypa/pip/issues/11417>`_)
- Prevent downloading files twice when PEP 658 metadata is present (`#11847 <https://github.com/pypa/pip/issues/11847>`_)
- Add permission check before configuration (`#11920 <https://github.com/pypa/pip/issues/11920>`_)
- Fix deprecation warnings in Python 3.12 for usage of shutil.rmtree (`#11957 <https://github.com/pypa/pip/issues/11957>`_)
- Ignore invalid or unreadable ``origin.json`` files in the cache of locally built wheels. (`#11985 <https://github.com/pypa/pip/issues/11985>`_)
- Fix installation of packages with PEP658 metadata using non-canonicalized names (`#12038 <https://github.com/pypa/pip/issues/12038>`_)
- Correctly parse ``dist-info-metadata`` values from JSON-format index data. (`#12042 <https://github.com/pypa/pip/issues/12042>`_)
- Fail with an error if the ``--python`` option is specified after the subcommand name. (`#12067 <https://github.com/pypa/pip/issues/12067>`_)
- Fix slowness when using ``importlib.metadata`` (the default way for pip to read metadata in Python 3.11+) and there is a large overlap between already installed and to-be-installed packages. (`#12079 <https://github.com/pypa/pip/issues/12079>`_)
- Pass the ``-r`` flag to mercurial to be explicit that a revision is passed and protect
against ``hg`` options injection as part of VCS URLs. Users that do not have control on
VCS URLs passed to pip are advised to upgrade. (`#12119 <https://github.com/pypa/pip/issues/12119>`_)

Vendored Libraries
------------------

- Upgrade certifi to 2023.5.7
- Upgrade platformdirs to 3.8.1
- Upgrade pygments to 2.15.1
- Upgrade pyparsing to 3.1.0
- Upgrade Requests to 2.31.0
- Upgrade rich to 13.4.2
- Upgrade setuptools to 68.0.0
- Updated typing_extensions to 4.6.0
- Upgrade typing_extensions to 4.7.1
- Upgrade urllib3 to 1.26.16


23.1.2 (2023-04-26)
===================


@@ -53,7 +116,7 @@ Deprecations and Removals
``--config-settings``. (`#11859 <https://github.com/pypa/pip/issues/11859>`_)
- Using ``--config-settings`` with projects that don't have a ``pyproject.toml`` now prints
a deprecation warning. In the future the presence of config settings will automatically
-enable the default build backend for legacy projects and pass the setttings to it. (`#11915 <https://github.com/pypa/pip/issues/11915>`_)
+enable the default build backend for legacy projects and pass the settings to it. (`#11915 <https://github.com/pypa/pip/issues/11915>`_)
- Remove ``setup.py install`` fallback when building a wheel failed for projects without
``pyproject.toml``. (`#8368 <https://github.com/pypa/pip/issues/8368>`_)
- When the ``wheel`` package is not installed, pip now uses the default build backend
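Among the 23.2 fixes listed above, #12067 changes how the `--python` option is parsed from the command line. A small illustrative sketch (the `./.venv` path and the package name are placeholders):

```
$ python -m pip --python ./.venv install SomePackage    # accepted: --python comes before the subcommand
$ python -m pip install SomePackage --python ./.venv    # now fails with an explicit error
```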
README.rst (14 lines changed)

@@ -3,9 +3,15 @@ pip - The Python Package Installer

.. image:: https://img.shields.io/pypi/v/pip.svg
:target: https://pypi.org/project/pip/
:alt: PyPI

+.. image:: https://img.shields.io/pypi/pyversions/pip
+:target: https://pypi.org/project/pip
+:alt: PyPI - Python Version

.. image:: https://readthedocs.org/projects/pip/badge/?version=latest
:target: https://pip.pypa.io/en/latest
:alt: Documentation

pip is the `package installer`_ for Python. You can use pip to install packages from the `Python Package Index`_ and other indexes.

@@ -19,10 +25,6 @@ We release updates regularly, with a new version every 3 months. Find more detai
* `Release notes`_
* `Release process`_

-In pip 20.3, we've `made a big improvement to the heart of pip`_; `learn more`_. We want your input, so `sign up for our user experience research studies`_ to help us do it right.

-**Note**: pip 21.0, in January 2021, removed Python 2 support, per pip's `Python 2 support policy`_. Please migrate to Python 3.

If you find bugs, need help, or want to talk to the developers, please use our mailing lists or chat rooms:

* `Issue tracking`_

@@ -49,10 +51,6 @@ rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_.
.. _Release process: https://pip.pypa.io/en/latest/development/release-process/
.. _GitHub page: https://github.com/pypa/pip
.. _Development documentation: https://pip.pypa.io/en/latest/development
-.. _made a big improvement to the heart of pip: https://pyfound.blogspot.com/2020/11/pip-20-3-new-resolver.html
-.. _learn more: https://pip.pypa.io/en/latest/user_guide/#changes-to-the-pip-dependency-resolver-in-20-3-2020
-.. _sign up for our user experience research studies: https://pyfound.blogspot.com/2020/03/new-pip-resolver-to-roll-out-this-year.html
-.. _Python 2 support policy: https://pip.pypa.io/en/latest/development/release-process/#python-2-support
.. _Issue tracking: https://github.com/pypa/pip/issues
.. _Discourse channel: https://discuss.python.org/c/packaging
.. _User IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa
SECURITY.md (11 lines changed)

@@ -1,3 +1,10 @@
-# Security and Vulnerability Reporting
+# Security Policy

-If you find any security issues, please report to [security@python.org](mailto:security@python.org)
+## Reporting a Vulnerability

+Please read the guidelines on reporting security issues [on the
+official website](https://www.python.org/dev/security/) for
+instructions on how to report a security-related problem to
+the Python Security Response Team responsibly.

+To reach the response team, email `security at python dot org`.
@@ -21,6 +21,12 @@ Usage
Description
===========

+.. attention::
+PyPI no longer supports ``pip search`` (or XML-RPC search). Please use https://pypi.org/search (via a browser)
+instead. See https://warehouse.pypa.io/api-reference/xml-rpc.html#deprecated-methods for more information.
+
+However, XML-RPC search (and this command) may still be supported by indexes other than PyPI.

.. pip-command-description:: search
@@ -103,7 +103,7 @@ $ pip install --upgrade pip
The current version of pip works on:

- Windows, Linux and MacOS.
-- CPython 3.7, 3.8, 3.9, 3.10 and latest PyPy3.
+- CPython 3.7, 3.8, 3.9, 3.10, 3.11, 3.12, and latest PyPy3.

pip is tested to work on the latest patch version of the Python interpreter,
for each of the minor versions listed above. Previous patch versions are
@@ -56,6 +56,9 @@ package with the following properties:
URL reference. `false` if the requirements was provided as a name and version
specifier.

+- `is_yanked`: `true` if the requirement was yanked from the index, but was still
+selected by pip conform to [PEP 592](https://peps.python.org/pep-0592/#installers).

- `download_info`: Information about the artifact (to be) downloaded for installation,
using the [direct URL data
structure](https://packaging.python.org/en/latest/specifications/direct-url-data-structure/).

@@ -106,6 +109,7 @@ will produce an output similar to this (metadata abriged for brevity):
}
},
"is_direct": false,
+"is_yanked": false,
"requested": true,
"metadata": {
"name": "pydantic",

@@ -133,6 +137,7 @@ will produce an output similar to this (metadata abriged for brevity):
}
},
"is_direct": true,
+"is_yanked": false,
"requested": true,
"metadata": {
"name": "packaging",
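The `is_yanked` field documented above can be consumed programmatically once a report has been generated, e.g. with `python -m pip install --dry-run --report report.json SomePackage`. A minimal sketch, assuming the report layout shown in the excerpts above (the file and package names are placeholders):

```python
import json

# Load an installation report written by `pip install --report report.json ...`.
with open("report.json", encoding="utf-8") as f:
    report = json.load(f)

# Each entry under "install" describes one resolved requirement.
for item in report.get("install", []):
    if item.get("is_yanked"):
        meta = item["metadata"]
        print(f"{meta['name']} {meta['version']} was selected from a yanked release")
```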
@@ -68,7 +68,7 @@ man pages][netrc-docs].
pip supports loading credentials stored in your keyring using the
{pypi}`keyring` library, which can be enabled py passing `--keyring-provider`
with a value of `auto`, `disabled`, `import`, or `subprocess`. The default
-value `auto` respects `--no-input` and not query keyring at all if the option
+value `auto` respects `--no-input` and does not query keyring at all if the option
is used; otherwise it tries the `import`, `subprocess`, and `disabled`
providers (in this order) and uses the first one that works.
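A brief usage sketch of the providers described above (the package name is a placeholder):

```
$ python -m pip install SomePackage --keyring-provider subprocess   # use the keyring CLI found on PATH
$ python -m pip install SomePackage --no-input                      # with the default auto provider, keyring is skipped entirely
```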
@@ -27,6 +27,13 @@ While this cache attempts to minimize network activity, it does not prevent
network access altogether. If you want a local install solution that
circumvents accessing PyPI, see {ref}`Installing from local packages`.

+```{versionchanged} 23.3
+A new cache format is now used, stored in a directory called `http-v2` (see
+below for this directory's location). Previously this cache was stored in a
+directory called `http` in the main cache directory. If you have completely
+switched to newer versions of `pip`, you may wish to delete the old directory.
+```

(wheel-caching)=

### Locally built wheels

@@ -124,11 +131,11 @@ The {ref}`pip cache` command can be used to manage pip's cache.

### Removing a single package

-`pip cache remove setuptools` removes all wheel files related to setuptools from pip's cache.
+`pip cache remove setuptools` removes all wheel files related to setuptools from pip's cache. HTTP cache files are not removed at this time.

### Removing the cache

-`pip cache purge` will clear all wheel files from pip's cache.
+`pip cache purge` will clear all files from pip's wheel and HTTP caches.

### Listing cached files
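Following the layout change described above, the `pip cache` subcommands remain the supported way to inspect and prune both the old `http` and the new `http-v2` directories; a brief sketch:

```
$ python -m pip cache dir                 # print the root of pip's cache
$ python -m pip cache info                # show locations and sizes, including the HTTP cache
$ python -m pip cache remove setuptools   # drop cached wheels for one package
$ python -m pip cache purge               # clear the wheel and HTTP caches entirely
```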
@@ -28,19 +28,9 @@ It is possible to use the system trust store, instead of the bundled certifi
certificates for verifying HTTPS certificates. This approach will typically
support corporate proxy certificates without additional configuration.

-In order to use system trust stores, you need to:

-- Use Python 3.10 or newer.
-- Install the {pypi}`truststore` package, in the Python environment you're
-running pip in.

-This is typically done by installing this package using a system package
-manager or by using pip in {ref}`Hash-checking mode` for this package and
-trusting the network using the `--trusted-host` flag.
+In order to use system trust stores, you need to use Python 3.10 or newer.

```{pip-cli}
$ python -m pip install truststore
[...]
$ python -m pip install SomePackage --use-feature=truststore
[...]
Successfully installed SomePackage
@@ -8,7 +8,7 @@ and this article is intended to help readers understand what is happening
```{note}
This document is a work in progress. The details included are accurate (at the
time of writing), but there is additional information, in particular around
-pip's interface with resolvelib, which have not yet been included.
+pip's interface with resolvelib, which has not yet been included.

Contributions to improve this document are welcome.
```

@@ -26,7 +26,7 @@ The practical implication of that is that there will always be some situations
where pip cannot determine what to install in a reasonable length of time. We
make every effort to ensure that such situations happen rarely, but eliminating
them altogether isn't even theoretically possible. We'll discuss what options
-yopu have if you hit a problem situation like this a little later.
+you have if you hit a problem situation like this a little later.

## Python specific issues

@@ -136,7 +136,7 @@ operations:
that satisfy them. This is essentially where the finder interacts with the
resolver.
* `is_satisfied_by` - checks if a candidate satisfies a requirement. This is
-basically the implementation of what a requirement meams.
+basically the implementation of what a requirement means.
* `get_dependencies` - get the dependency metadata for a candidate. This is
the implementation of the process of getting and reading package metadata.
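The `find_matches` / `is_satisfied_by` / `get_dependencies` operations listed above correspond to the provider interface pip implements for resolvelib. The following is a minimal, self-contained sketch over a hard-coded toy index; the names and the simplistic integer versioning are illustrative only, not pip's actual implementation:

```python
from typing import Dict, List, Tuple

# A toy "index": project name -> available versions -> dependency constraints.
INDEX: Dict[str, Dict[int, List[Tuple[str, int]]]] = {
    "app": {1: [("lib", 2)]},        # app 1 depends on lib >= 2
    "lib": {1: [], 2: [], 3: []},
}

Requirement = Tuple[str, int]        # (project, minimum version)
Candidate = Tuple[str, int]          # (project, exact version)


def find_matches(requirement: Requirement) -> List[Candidate]:
    """Return candidate versions that could satisfy a requirement, newest first."""
    name, minimum = requirement
    versions = sorted(INDEX.get(name, {}), reverse=True)
    return [(name, v) for v in versions if v >= minimum]


def is_satisfied_by(requirement: Requirement, candidate: Candidate) -> bool:
    """Decide whether a concrete candidate satisfies a requirement."""
    return candidate[0] == requirement[0] and candidate[1] >= requirement[1]


def get_dependencies(candidate: Candidate) -> List[Requirement]:
    """Read the candidate's metadata to learn what it depends on."""
    name, version = candidate
    return list(INDEX[name][version])


if __name__ == "__main__":
    root: Requirement = ("app", 1)
    for match in find_matches(root):
        print(match, "->", get_dependencies(match))
```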
@@ -1,4 +1,4 @@
-sphinx ~= 6.0
+sphinx ~= 7.0
towncrier
furo
myst_parser
news/11394.bugfix.rst (new file)
+Ignore errors in temporary directory cleanup (show a warning instead).

news/11649.bugfix.rst (new file)
+Normalize extras according to :pep:`685` from package metadata in the resolver
+for comparison. This ensures extras are correctly compared and merged as long
+as the package providing the extra(s) is built with values normalized according
+to the standard. Note, however, that this *does not* solve cases where the
+package itself contains unnormalized extra values in the metadata.

news/12005.bugfix.rst (new file)
+Removed uses of ``datetime.datetime.utcnow`` from non-vendored code.

news/12059.doc.rst (new file)
+Document that ``pip search`` support has been removed from PyPI

news/12122.doc.rst (new file)
+Clarify --prefer-binary in CLI and docs

news/12155.process.rst (new file)
+The metadata-fetching log message is moved to the VERBOSE level and now hidden
+by default. The more significant information in this message to most users are
+already available in surrounding logs (the package name and version of the
+metadata being fetched), while the URL to the exact metadata file is generally
+too long and clutters the output. The message can be brought back with
+``--verbose``.

news/12175.removal.rst (new file)
+Drop a fallback to using SecureTransport on macOS. It was useful when pip detected OpenSSL older than 1.0.1, but the current pip does not support any Python version supporting such old OpenSSL versions.

news/12183.trivial.rst (new file)
+Add test cases for some behaviors of ``install --dry-run`` and ``--use-feature=fast-deps``.

news/12187.bugfix.rst (new file)
+Fix improper handling of the new onexc argument of ``shutil.rmtree()`` in Python 3.12.

news/12191.bugfix.rst (new file)
+Prevent downloading sdists twice when PEP 658 metadata is present.

news/12194.trivial.rst (new file)
+Add lots of comments to the ``BuildTracker``.

news/12204.feature.rst (new file)
+Improve use of datastructures to make candidate selection 1.6x faster

news/12215.feature.rst (new file)
+Allow ``pip install --dry-run`` to use platform and ABI overriding options similar to ``--target``.

news/12224.feature.rst (new file)
+Add ``is_yanked`` boolean entry to the installation report (``--report``) to indicate whether the requirement was yanked from the index, but was still selected by pip conform to PEP 592.

news/12225.bugfix.rst (new file)
+Filter out yanked links from the available versions error message: "(from versions: 1.0, 2.0, 3.0)" will not contain yanked versions conform PEP 592. The yanked versions (if any) will be mentioned in a separate error message.

news/12252.trivial.rst (new empty file)

news/12254.process.rst (new file)
+Added reference to `vulnerability reporting guidelines <https://www.python.org/dev/security/>`_ to pip's security policy.

news/12261.trivial.rst (new empty file)

news/12280.bugfix.rst (new file)
+Fix crash when the git version number contains something else than digits and dots.

news/12306.bugfix.rst (new file)
+Use ``-r=...`` instead of ``-r ...`` to specify references with Mercurial.

news/12AE57EC-683C-4A8E-BCCB-851FCD0730B4.trivial.rst (new empty file)

news/1F54AB69-21F3-49F6-B938-AB16E326F82C.trivial.rst (new empty file)

news/2984.bugfix.rst (new file)
+pip uses less memory when caching large packages. As a result, there is a new on-disk cache format stored in a new directory ($PIP_CACHE_DIR/http-v2).

news/4A0C40FF-ABE1-48C7-954C-7C3EB229135F.trivial.rst (new file)
+Add ruff rules ASYNC,C4,C90,PERF,PLE,PLR for minor optimizations and to set upper limits on code complexity.

news/732404DE-8011-4146-8CAD-85D7756D88A6.trivial.rst (new empty file)

news/80291DF4-7B0F-4268-B682-E1FCA1C3ACED.trivial.rst (new empty file)

news/85F7E260-68FF-4C1E-A2CB-CF8634829D2D.trivial.rst (new empty file)

news/E2B261CA-A0CF-4309-B808-1210C0B54632.trivial.rst (new empty file)

news/certifi.vendor.rst (new file)
+Upgrade certifi to 2023.7.22

(deleted news file, 2 lines)
-Added seperate instructions for installing ``nox`` in the ``docs/development/getting-started.rst`` doc. and slight update
-to the below ``Running pip From Source Tree`` section.

news/truststore.vendor.rst (new file)
+Add truststore 0.8.0

news/zhsdgdlsjgksdfj.trivial.rst (new empty file)
noxfile.py (12 lines changed)

@@ -67,7 +67,7 @@ def should_update_common_wheels() -> bool:
# -----------------------------------------------------------------------------
# Development Commands
# -----------------------------------------------------------------------------
-@nox.session(python=["3.7", "3.8", "3.9", "3.10", "3.11", "pypy3"])
+@nox.session(python=["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "pypy3"])
def test(session: nox.Session) -> None:
# Get the common wheels.
if should_update_common_wheels():

@@ -89,6 +89,7 @@ def test(session: nox.Session) -> None:
shutil.rmtree(sdist_dir, ignore_errors=True)

# fmt: off
+session.install("setuptools")
session.run(
"python", "setup.py", "sdist", "--formats=zip", "--dist-dir", sdist_dir,
silent=True,

@@ -183,6 +184,12 @@ def lint(session: nox.Session) -> None:
# git reset --hard origin/main
@nox.session
def vendoring(session: nox.Session) -> None:
+# Ensure that the session Python is running 3.10+
+# so that truststore can be installed correctly.
+session.run(
+"python", "-c", "import sys; sys.exit(1 if sys.version_info < (3, 10) else 0)"
+)

session.install("vendoring~=1.2.0")

parser = argparse.ArgumentParser(prog="nox -s vendoring")

@@ -219,7 +226,7 @@ def vendoring(session: nox.Session) -> None:
new_version = old_version
for inner_name, inner_version in pinned_requirements(vendor_txt):
if inner_name == name:
-# this is a dedicated assignment, to make flake8 happy
+# this is a dedicated assignment, to make lint happy
new_version = inner_version
break
else:

@@ -351,6 +358,7 @@ def build_dists(session: nox.Session) -> List[str]:
)

session.log("# Build distributions")
+session.install("setuptools", "wheel")
session.run("python", "setup.py", "sdist", "bdist_wheel", silent=True)
produced_dists = glob.glob("dist/*")
|
@@ -71,3 +71,56 @@ setuptools = "pkg_resources"
CacheControl = "https://raw.githubusercontent.com/ionrock/cachecontrol/v0.12.6/LICENSE.txt"
distlib = "https://bitbucket.org/pypa/distlib/raw/master/LICENSE.txt"
webencodings = "https://github.com/SimonSapin/python-webencodings/raw/master/LICENSE"

+[tool.ruff]
+extend-exclude = [
+"_vendor",
+"./build",
+".scratch",
+"data",
+]
+ignore = [
+"B019",
+"B020",
+"B904", # Ruff enables opinionated warnings by default
+"B905", # Ruff enables opinionated warnings by default
+"G202",
+]
+line-length = 88
+select = [
+"ASYNC",
+"B",
+"C4",
+"C90",
+"E",
+"F",
+"G",
+"I",
+"ISC",
+"PERF",
+"PLE",
+"PLR0",
+"W",
+"RUF100",
+]

+[tool.ruff.isort]
+# We need to explicitly make pip "first party" as it's imported by code in
+# the docs and tests directories.
+known-first-party = ["pip"]
+known-third-party = ["pip._vendor"]

+[tool.ruff.mccabe]
+max-complexity = 33 # default is 10

+[tool.ruff.per-file-ignores]
+"noxfile.py" = ["G"]
+"src/pip/_internal/*" = ["PERF203"]
+"tests/*" = ["B011"]
+"tests/unit/test_finder.py" = ["C414"]

+[tool.ruff.pylint]
+max-args = 15 # default is 5
+max-branches = 28 # default is 12
+max-returns = 13 # default is 6
+max-statements = 134 # default is 50
|
setup.cfg (51 lines changed)

@@ -1,46 +1,13 @@
-[isort]
-profile = black
-skip =
-./build,
-.nox,
-.tox,
-.scratch,
-_vendor,
-data
-known_third_party =
-pip._vendor

-[flake8]
-max-line-length = 88
-exclude =
-./build,
-.nox,
-.tox,
-.scratch,
-_vendor,
-data
-enable-extensions = G
-extend-ignore =
-G200, G202,
-# black adds spaces around ':'
-E203,
-# using a cache
-B019,
-# reassigning variables in a loop
-B020,
-per-file-ignores =
-# G: The plugin logging-format treats every .log and .error as logging.
-noxfile.py: G
-# B011: Do not call assert False since python -O removes these calls
-tests/*: B011

[mypy]
mypy_path = $MYPY_CONFIG_FILE_DIR/src

strict = True

no_implicit_reexport = False
allow_subclassing_any = True
allow_untyped_calls = True
warn_return_any = False
ignore_missing_imports = True
disallow_untyped_defs = True
disallow_any_generics = True
warn_unused_ignores = True
no_implicit_optional = True

[mypy-pip._internal.utils._jaraco_text]
ignore_errors = True

@@ -51,12 +18,8 @@ ignore_errors = True
# These vendored libraries use runtime magic to populate things and don't sit
# well with static typing out of the box. Eventually we should provide correct
# typing information for their public interface and remove these configs.
[mypy-pip._vendor.colorama]
follow_imports = skip
[mypy-pip._vendor.pkg_resources]
follow_imports = skip
[mypy-pip._vendor.progress.*]
follow_imports = skip
[mypy-pip._vendor.requests.*]
follow_imports = skip
|
@@ -1,6 +1,6 @@
from typing import List, Optional

-__version__ = "23.2.dev0"
+__version__ = "23.3.dev0"


def main(args: Optional[List[str]] = None) -> int:
|
@@ -1,6 +1,5 @@
import os
import sys
import warnings

# Remove '' and current working directory from the first entry
# of sys.path, if present to avoid using current directory

@@ -20,12 +19,6 @@ if __package__ == "":
sys.path.insert(0, path)

if __name__ == "__main__":
-# Work around the error reported in #9540, pending a proper fix.
-# Note: It is essential the warning filter is set *before* importing
-# pip, as the deprecation happens at import time, not runtime.
-warnings.filterwarnings(
-"ignore", category=DeprecationWarning, module=".*packaging\\.version"
-)
from pip._internal.cli.main import main as _main

sys.exit(_main())
|
@@ -1,6 +1,5 @@
from typing import List, Optional

-import pip._internal.utils.inject_securetransport # noqa
from pip._internal.utils import _log

# init_logging() must be called before any call to logging.getLogger()
|
@@ -78,12 +78,10 @@ class Cache:
if can_not_cache:
return []

-candidates = []
path = self.get_path_for_link(link)
if os.path.isdir(path):
-for candidate in os.listdir(path):
-candidates.append((candidate, path))
-return candidates
+return [(candidate, path) for candidate in os.listdir(path)]
+return []

def get_path_for_link(self, link: Link) -> str:
"""Return a directory to store cached items in for link."""

@@ -194,7 +192,17 @@ class CacheEntry:
self.origin: Optional[DirectUrl] = None
origin_direct_url_path = Path(self.link.file_path).parent / ORIGIN_JSON_NAME
if origin_direct_url_path.exists():
-self.origin = DirectUrl.from_json(origin_direct_url_path.read_text())
+try:
+self.origin = DirectUrl.from_json(
+origin_direct_url_path.read_text(encoding="utf-8")
+)
+except Exception as e:
+logger.warning(
+"Ignoring invalid cache entry origin file %s for %s (%s)",
+origin_direct_url_path,
+link.filename,
+e,
+)


class WheelCache(Cache):

@@ -257,14 +265,24 @@ class WheelCache(Cache):
@staticmethod
def record_download_origin(cache_dir: str, download_info: DirectUrl) -> None:
origin_path = Path(cache_dir) / ORIGIN_JSON_NAME
-if origin_path.is_file():
-origin = DirectUrl.from_json(origin_path.read_text())
-# TODO: use DirectUrl.equivalent when https://github.com/pypa/pip/pull/10564
-# is merged.
+if origin_path.exists():
+try:
+origin = DirectUrl.from_json(origin_path.read_text(encoding="utf-8"))
+except Exception as e:
+logger.warning(
+"Could not read origin file %s in cache entry (%s). "
+"Will attempt to overwrite it.",
+origin_path,
+e,
+)
+else:
+# TODO: use DirectUrl.equivalent when
+# https://github.com/pypa/pip/pull/10564 is merged.
if origin.url != download_info.url:
logger.warning(
-"Origin URL %s in cache entry %s does not match download URL %s. "
-"This is likely a pip bug or a cache corruption issue.",
+"Origin URL %s in cache entry %s does not match download URL "
+"%s. This is likely a pip bug or a cache corruption issue. "
+"Will overwrite it with the new value.",
origin.url,
cache_dir,
download_info.url,
|
@@ -71,8 +71,9 @@ def autocomplete() -> None:

for opt in subcommand.parser.option_list_all:
if opt.help != optparse.SUPPRESS_HELP:
-for opt_str in opt._long_opts + opt._short_opts:
-options.append((opt_str, opt.nargs))
+options += [
+(opt_str, opt.nargs) for opt_str in opt._long_opts + opt._short_opts
+]

# filter out previously specified options from available options
prev_opts = [x.split("=")[0] for x in cwords[1 : cword - 1]]
|
@@ -131,6 +131,17 @@ class Command(CommandContextMixIn):
", ".join(sorted(always_enabled_features)),
)

+# Make sure that the --python argument isn't specified after the
+# subcommand. We can tell, because if --python was specified,
+# we should only reach this point if we're running in the created
+# subprocess, which has the _PIP_RUNNING_IN_SUBPROCESS environment
+# variable set.
+if options.python and "_PIP_RUNNING_IN_SUBPROCESS" not in os.environ:
+logger.critical(
+"The --python option must be placed before the pip subcommand name"
+)
+sys.exit(ERROR)

# TODO: Try to get these passing down from the command?
# without resorting to os.environ to hold these.
# This also affects isolated builds and it should.

@@ -170,7 +181,7 @@ class Command(CommandContextMixIn):
assert isinstance(status, int)
return status
except DiagnosticPipError as exc:
-logger.error("[present-rich] %s", exc)
+logger.error("%s", exc, extra={"rich": True})
logger.debug("Exception information:", exc_info=True)

return ERROR
|
@@ -92,10 +92,10 @@ def check_dist_restriction(options: Values, check_target: bool = False) -> None:
)

if check_target:
-if dist_restriction_set and not options.target_dir:
+if not options.dry_run and dist_restriction_set and not options.target_dir:
raise CommandError(
"Can not use any platform or abi specific options unless "
-"installing via '--target'"
+"installing via '--target' or using '--dry-run'"
)

@@ -670,7 +670,10 @@ def prefer_binary() -> Option:
dest="prefer_binary",
action="store_true",
default=False,
-help="Prefer older binary packages over newer source packages.",
+help=(
+"Prefer binary packages over source packages, even if the "
+"source packages are newer."
+),
)

@@ -823,7 +826,7 @@ def _handle_config_settings(
) -> None:
key, sep, val = value.partition("=")
if sep != "=":
-parser.error(f"Arguments to {opt_str} must be of the form KEY=VAL") # noqa
+parser.error(f"Arguments to {opt_str} must be of the form KEY=VAL")
dest = getattr(parser.values, option.dest)
if dest is None:
dest = {}

@@ -918,13 +921,13 @@ def _handle_merge_hash(
algo, digest = value.split(":", 1)
except ValueError:
parser.error(
-"Arguments to {} must be a hash name " # noqa
+"Arguments to {} must be a hash name "
"followed by a value, like --hash=sha256:"
"abcde...".format(opt_str)
)
if algo not in STRONG_HASHES:
parser.error(
-"Allowed hash algorithms for {} are {}.".format( # noqa
+"Allowed hash algorithms for {} are {}.".format(
opt_str, ", ".join(STRONG_HASHES)
)
)
|
@@ -229,7 +229,7 @@ class ConfigOptionParser(CustomOptionParser):
val = strtobool(val)
except ValueError:
self.error(
-"{} is not a valid value for {} option, " # noqa
+"{} is not a valid value for {} option, "
"please specify a boolean value like yes/no, "
"true/false or 1/0 instead.".format(val, key)
)

@@ -240,7 +240,7 @@ class ConfigOptionParser(CustomOptionParser):
val = int(val)
if not isinstance(val, int) or val < 0:
self.error(
-"{} is not a valid value for {} option, " # noqa
+"{} is not a valid value for {} option, "
"please instead specify either a non-negative integer "
"or a boolean value like yes/no or false/true "
"which is equivalent to 1/0.".format(val, key)
|
@@ -58,12 +58,9 @@ def _create_truststore_ssl_context() -> Optional["SSLContext"]:
return None

try:
-import truststore
-except ImportError:
-raise CommandError(
-"To use the truststore feature, 'truststore' must be installed into "
-"pip's current environment."
-)
+from pip._vendor import truststore
+except ImportError as e:
+raise CommandError(f"The truststore feature is unavailable: {e}")

return truststore.SSLContext(ssl.PROTOCOL_TLS_CLIENT)

@@ -123,7 +120,7 @@ class SessionCommandMixin(CommandContextMixIn):
ssl_context = None

session = PipSession(
-cache=os.path.join(cache_dir, "http") if cache_dir else None,
+cache=os.path.join(cache_dir, "http-v2") if cache_dir else None,
retries=retries if retries is not None else options.retries,
trusted_hosts=options.trusted_hosts,
index_urls=self._get_index_urls(options),

@@ -268,7 +265,7 @@ class RequirementCommand(IndexGroupCommand):
if "legacy-resolver" in options.deprecated_features_enabled:
return "legacy"

-return "2020-resolver"
+return "resolvelib"

@classmethod
def make_requirement_preparer(

@@ -287,9 +284,10 @@ class RequirementCommand(IndexGroupCommand):
"""
temp_build_dir_path = temp_build_dir.path
assert temp_build_dir_path is not None
+legacy_resolver = False

resolver_variant = cls.determine_resolver_variant(options)
-if resolver_variant == "2020-resolver":
+if resolver_variant == "resolvelib":
lazy_wheel = "fast-deps" in options.features_enabled
if lazy_wheel:
logger.warning(

@@ -300,6 +298,7 @@ class RequirementCommand(IndexGroupCommand):
"production."
)
else:
+legacy_resolver = True
lazy_wheel = False
if "fast-deps" in options.features_enabled:
logger.warning(

@@ -320,6 +319,7 @@ class RequirementCommand(IndexGroupCommand):
use_user_site=use_user_site,
lazy_wheel=lazy_wheel,
verbosity=verbosity,
+legacy_resolver=legacy_resolver,
)

@classmethod

@@ -349,7 +349,7 @@ class RequirementCommand(IndexGroupCommand):
# The long import name and duplicated invocation is needed to convince
# Mypy into correctly typechecking. Otherwise it would complain the
# "Resolver" class being redefined.
-if resolver_variant == "2020-resolver":
+if resolver_variant == "resolvelib":
import pip._internal.resolution.resolvelib.resolver

return pip._internal.resolution.resolvelib.resolver.Resolver(
|
@@ -3,10 +3,10 @@ import textwrap
from optparse import Values
from typing import Any, List

-import pip._internal.utils.filesystem as filesystem
from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.exceptions import CommandError, PipError
+from pip._internal.utils import filesystem
from pip._internal.utils.logging import getLogger

logger = getLogger(__name__)

@@ -93,24 +93,30 @@ class CacheCommand(Command):
num_http_files = len(self._find_http_files(options))
num_packages = len(self._find_wheels(options, "*"))

-http_cache_location = self._cache_dir(options, "http")
+http_cache_location = self._cache_dir(options, "http-v2")
+old_http_cache_location = self._cache_dir(options, "http")
wheels_cache_location = self._cache_dir(options, "wheels")
-http_cache_size = filesystem.format_directory_size(http_cache_location)
+http_cache_size = filesystem.format_size(
+filesystem.directory_size(http_cache_location)
++ filesystem.directory_size(old_http_cache_location)
+)
wheels_cache_size = filesystem.format_directory_size(wheels_cache_location)

message = (
textwrap.dedent(
"""
-Package index page cache location: {http_cache_location}
+Package index page cache location (pip v23.3+): {http_cache_location}
+Package index page cache location (older pips): {old_http_cache_location}
Package index page cache size: {http_cache_size}
Number of HTTP files: {num_http_files}
Locally built wheels location: {wheels_cache_location}
Locally built wheels size: {wheels_cache_size}
Number of locally built wheels: {package_count}
-"""
+""" # noqa: E501
)
.format(
http_cache_location=http_cache_location,
+old_http_cache_location=old_http_cache_location,
http_cache_size=http_cache_size,
num_http_files=num_http_files,
wheels_cache_location=wheels_cache_location,

@@ -151,14 +157,8 @@ class CacheCommand(Command):
logger.info("\n".join(sorted(results)))

def format_for_abspath(self, files: List[str]) -> None:
-if not files:
-return
-
-results = []
-for filename in files:
-results.append(filename)
-
-logger.info("\n".join(sorted(results)))
+if files:
+logger.info("\n".join(sorted(files)))

def remove_cache_items(self, options: Values, args: List[Any]) -> None:
if len(args) > 1:

@@ -195,8 +195,11 @@ class CacheCommand(Command):
return os.path.join(options.cache_dir, subdir)

def _find_http_files(self, options: Values) -> List[str]:
-http_dir = self._cache_dir(options, "http")
-return filesystem.find_files(http_dir, "*")
+old_http_dir = self._cache_dir(options, "http")
+new_http_dir = self._cache_dir(options, "http-v2")
+return filesystem.find_files(old_http_dir, "*") + filesystem.find_files(
+new_http_dir, "*"
+)

def _find_wheels(self, options: Values, pattern: str) -> List[str]:
wheel_dir = self._cache_dir(options, "wheels")
|
@@ -7,6 +7,7 @@ from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.operations.check import (
check_package_set,
create_package_set_from_installed,
+warn_legacy_versions_and_specifiers,
)
from pip._internal.utils.misc import write_output

@@ -21,6 +22,7 @@ class CheckCommand(Command):

def run(self, options: Values, args: List[str]) -> int:
package_set, parsing_probs = create_package_set_from_installed()
+warn_legacy_versions_and_specifiers(package_set)
missing, conflicting = check_package_set(package_set)

for project_name in missing:
|
@@ -22,15 +22,10 @@ COMPLETION_SCRIPTS = {
complete -o default -F _pip_completion {prog}
""",
"zsh": """
-function _pip_completion {{
-local words cword
-read -Ac words
-read -cn cword
-reply=( $( COMP_WORDS="$words[*]" \\
-COMP_CWORD=$(( cword-1 )) \\
-PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null ))
-}}
-compctl -K _pip_completion {prog}
+#compdef -P pip[0-9.]#
+compadd $( COMP_WORDS="$words[*]" \\
+COMP_CWORD=$((CURRENT-1)) \\
+PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null )
""",
"fish": """
function __fish_complete_pip
|
@@ -46,22 +46,29 @@ def create_vendor_txt_map() -> Dict[str, str]:
return dict(line.split("==", 1) for line in lines)


-def get_module_from_module_name(module_name: str) -> ModuleType:
+def get_module_from_module_name(module_name: str) -> Optional[ModuleType]:
# Module name can be uppercase in vendor.txt for some reason...
module_name = module_name.lower().replace("-", "_")
# PATCH: setuptools is actually only pkg_resources.
if module_name == "setuptools":
module_name = "pkg_resources"

try:
__import__(f"pip._vendor.{module_name}", globals(), locals(), level=0)
return getattr(pip._vendor, module_name)
except ImportError:
# We allow 'truststore' to fail to import due
# to being unavailable on Python 3.9 and earlier.
if module_name == "truststore" and sys.version_info < (3, 10):
return None
raise


def get_vendor_version_from_module(module_name: str) -> Optional[str]:
module = get_module_from_module_name(module_name)
version = getattr(module, "__version__", None)

-if not version:
+if module and not version:
# Try to find version in debundled module info.
assert module.__file__ is not None
env = get_environment([os.path.dirname(module.__file__)])

@@ -105,7 +112,7 @@ def show_tags(options: Values) -> None:
tag_limit = 10

target_python = make_target_python(options)
-tags = target_python.get_tags()
+tags = target_python.get_sorted_tags()

# Display the target options that were explicitly provided.
formatted_target = target_python.format_given()

@@ -134,10 +141,7 @@ def show_tags(options: Values) -> None:


def ca_bundle_info(config: Configuration) -> str:
-levels = set()
-for key, _ in config.items():
-levels.add(key.split(".")[0])
-
+levels = {key.split(".", 1)[0] for key, _ in config.items()}
if not levels:
return "Not specified"
|
@@ -137,6 +137,10 @@ class DownloadCommand(RequirementCommand):
assert req.name is not None
preparer.save_linked_requirement(req)
downloaded.append(req.name)

preparer.prepare_linked_requirements_more(requirement_set.requirements.values())
requirement_set.warn_legacy_versions_and_specifiers()

if downloaded:
write_output("Successfully downloaded %s", " ".join(downloaded))
|
@@ -1,6 +1,6 @@
import sys
from optparse import Values
-from typing import List
+from typing import AbstractSet, List

from pip._internal.cli import cmdoptions
from pip._internal.cli.base_command import Command

@@ -8,7 +8,18 @@ from pip._internal.cli.status_codes import SUCCESS
from pip._internal.operations.freeze import freeze
from pip._internal.utils.compat import stdlib_pkgs

-DEV_PKGS = {"pip", "setuptools", "distribute", "wheel"}

+def _should_suppress_build_backends() -> bool:
+return sys.version_info < (3, 12)


+def _dev_pkgs() -> AbstractSet[str]:
+pkgs = {"pip"}

+if _should_suppress_build_backends():
+pkgs |= {"setuptools", "distribute", "wheel"}

+return pkgs


class FreezeCommand(Command):

@@ -61,7 +72,7 @@ class FreezeCommand(Command):
action="store_true",
help=(
"Do not skip these packages in the output:"
-" {}".format(", ".join(DEV_PKGS))
+" {}".format(", ".join(_dev_pkgs()))
),
)
self.cmd_opts.add_option(

@@ -77,7 +88,7 @@ class FreezeCommand(Command):
def run(self, options: Values, args: List[str]) -> int:
skip = set(stdlib_pkgs)
if not options.freeze_all:
-skip.update(DEV_PKGS)
+skip.update(_dev_pkgs())

if options.excludes:
skip.update(options.excludes)
|
@@ -387,6 +387,9 @@ class InstallCommand(RequirementCommand):
json.dump(report.to_dict(), f, indent=2, ensure_ascii=False)

if options.dry_run:
+# In non dry-run mode, the legacy versions and specifiers check
+# will be done as part of conflict detection.
+requirement_set.warn_legacy_versions_and_specifiers()
would_install_items = sorted(
(r.metadata["name"], r.metadata["version"])
for r in requirement_set.requirements_to_install

@@ -498,7 +501,7 @@ class InstallCommand(RequirementCommand):
show_traceback,
options.use_user_site,
)
-logger.error(message, exc_info=show_traceback) # noqa
+logger.error(message, exc_info=show_traceback)

return ERROR

@@ -592,7 +595,7 @@ class InstallCommand(RequirementCommand):
"source of the following dependency conflicts."
)
else:
-assert resolver_variant == "2020-resolver"
+assert resolver_variant == "resolvelib"
parts.append(
"pip's dependency resolver does not currently take into account "
"all the packages that are installed. This behaviour is the "

@@ -625,7 +628,7 @@ class InstallCommand(RequirementCommand):
requirement=req,
dep_name=dep_name,
dep_version=dep_version,
-you=("you" if resolver_variant == "2020-resolver" else "you'll"),
+you=("you" if resolver_variant == "resolvelib" else "you'll"),
)
parts.append(message)
|
@@ -103,7 +103,10 @@ class ListCommand(IndexGroupCommand):
            dest="list_format",
            default="columns",
            choices=("columns", "freeze", "json"),
            help="Select the output format among: columns (default), freeze, or json",
            help=(
                "Select the output format among: columns (default), freeze, or json. "
                "The 'freeze' format cannot be used with the --outdated option."
            ),
        )

        self.cmd_opts.add_option(

@@ -157,7 +160,7 @@ class ListCommand(IndexGroupCommand):

        if options.outdated and options.list_format == "freeze":
            raise CommandError(
                "List format 'freeze' can not be used with the --outdated option."
                "List format 'freeze' cannot be used with the --outdated option."
            )

        cmdoptions.check_list_path_option(options)

@@ -294,7 +297,7 @@ class ListCommand(IndexGroupCommand):

    # Create and add a separator.
    if len(data) > 0:
        pkg_strings.insert(1, " ".join(map(lambda x: "-" * x, sizes)))
        pkg_strings.insert(1, " ".join("-" * x for x in sizes))

    for val in pkg_strings:
        write_output(val)

@@ -153,6 +153,9 @@ class WheelCommand(RequirementCommand):
            elif should_build_for_wheel_command(req):
                reqs_to_build.append(req)

        preparer.prepare_linked_requirements_more(requirement_set.requirements.values())
        requirement_set.warn_legacy_versions_and_specifiers()

        # build wheels
        build_successes, build_failures = build(
            reqs_to_build,

@@ -210,8 +210,15 @@ class Configuration:
            # Ensure directory exists.
            ensure_dir(os.path.dirname(fname))

            # Ensure the directory's permissions allow writing; surface a clear error if not.
            try:
                with open(fname, "w") as f:
                    parser.write(f)
            except OSError as error:
                raise ConfigurationError(
                    f"An error occurred while writing to the configuration file "
                    f"{fname}: {error}"
                )

    #
    # Private routines

@@ -1,4 +1,5 @@
import abc
from typing import Optional

from pip._internal.index.package_finder import PackageFinder
from pip._internal.metadata.base import BaseDistribution

@@ -19,12 +20,23 @@ class AbstractDistribution(metaclass=abc.ABCMeta):

    - we must be able to create a Distribution object exposing the
      above metadata.

    - if we need to do work in the build tracker, we must be able to generate a unique
      string to identify the requirement in the build tracker.
    """

    def __init__(self, req: InstallRequirement) -> None:
        super().__init__()
        self.req = req

    @abc.abstractproperty
    def build_tracker_id(self) -> Optional[str]:
        """A string that uniquely identifies this requirement to the build tracker.

        If None, then this dist has no work to do in the build tracker, and
        ``.prepare_distribution_metadata()`` will not be called."""
        raise NotImplementedError()

    @abc.abstractmethod
    def get_metadata_distribution(self) -> BaseDistribution:
        raise NotImplementedError()

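To make the new abstract property concrete, here is a minimal sketch of what a subclass now has to supply; it mirrors the sdist.py hunk below, and anything beyond those names is illustrative (prepare_distribution_metadata() is omitted for brevity).

# Sketch only -- see the sdist.py hunk further down for the real implementation.
from typing import Optional

from pip._internal.distributions.base import AbstractDistribution
from pip._internal.metadata import BaseDistribution

class ExampleSourceDistribution(AbstractDistribution):
    @property
    def build_tracker_id(self) -> Optional[str]:
        # Source builds need tracking, so return a stable, unique key;
        # wheels and already-installed distributions return None instead.
        assert self.req.link
        return self.req.link.url_without_fragment

    def get_metadata_distribution(self) -> BaseDistribution:
        return self.req.get_dist()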
@@ -1,3 +1,5 @@
from typing import Optional

from pip._internal.distributions.base import AbstractDistribution
from pip._internal.index.package_finder import PackageFinder
from pip._internal.metadata import BaseDistribution

@@ -10,6 +12,10 @@ class InstalledDistribution(AbstractDistribution):
    been computed.
    """

    @property
    def build_tracker_id(self) -> Optional[str]:
        return None

    def get_metadata_distribution(self) -> BaseDistribution:
        assert self.req.satisfied_by is not None, "not actually installed"
        return self.req.satisfied_by

@@ -1,5 +1,5 @@
import logging
from typing import Iterable, Set, Tuple
from typing import Iterable, Optional, Set, Tuple

from pip._internal.build_env import BuildEnvironment
from pip._internal.distributions.base import AbstractDistribution

@@ -18,6 +18,12 @@ class SourceDistribution(AbstractDistribution):
    generated, either using PEP 517 or using the legacy `setup.py egg_info`.
    """

    @property
    def build_tracker_id(self) -> Optional[str]:
        """Identify this requirement uniquely by its link."""
        assert self.req.link
        return self.req.link.url_without_fragment

    def get_metadata_distribution(self) -> BaseDistribution:
        return self.req.get_dist()

@@ -1,3 +1,5 @@
from typing import Optional

from pip._vendor.packaging.utils import canonicalize_name

from pip._internal.distributions.base import AbstractDistribution

@@ -15,6 +17,10 @@ class WheelDistribution(AbstractDistribution):
    This does not need any preparation as wheels can be directly unpacked.
    """

    @property
    def build_tracker_id(self) -> Optional[str]:
        return None

    def get_metadata_distribution(self) -> BaseDistribution:
        """Loads the metadata from the wheel file into memory and returns a
        Distribution that uses it, not relying on the wheel file or

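Taken together, the three subclasses let the preparer decide whether a requirement needs build tracking at all. A rough sketch of that flow follows; the names come from the hunks in this diff, the wiring is illustrative, and the real call site is the _get_prepared_distribution hunk further down.

# Sketch only.
from pip._internal.distributions import make_distribution_for_install_requirement

def prepare(req, build_tracker, finder, build_isolation, check_build_deps):
    abstract_dist = make_distribution_for_install_requirement(req)
    tracker_id = abstract_dist.build_tracker_id
    if tracker_id is not None:
        # Only source distributions return an id, so only they are registered
        # with the build tracker and run prepare_distribution_metadata().
        with build_tracker.track(req, tracker_id):
            abstract_dist.prepare_distribution_metadata(
                finder, build_isolation, check_build_deps
            )
    return abstract_dist.get_metadata_distribution()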
@ -544,7 +544,7 @@ class HashMissing(HashError):
|
|||
# so the output can be directly copied into the requirements file.
|
||||
package = (
|
||||
self.req.original_link
|
||||
if self.req.original_link
|
||||
if self.req.is_direct
|
||||
# In case someone feeds something downright stupid
|
||||
# to InstallRequirement's constructor.
|
||||
else getattr(self.req, "req", None)
|
||||
|
|
|
@ -198,7 +198,7 @@ class LinkEvaluator:
|
|||
reason = f"wrong project name (not {self.project_name})"
|
||||
return (LinkType.different_project, reason)
|
||||
|
||||
supported_tags = self._target_python.get_tags()
|
||||
supported_tags = self._target_python.get_unsorted_tags()
|
||||
if not wheel.supported(supported_tags):
|
||||
# Include the wheel's tags in the reason string to
|
||||
# simplify troubleshooting compatibility issues.
|
||||
|
@ -414,7 +414,7 @@ class CandidateEvaluator:
|
|||
if specifier is None:
|
||||
specifier = specifiers.SpecifierSet()
|
||||
|
||||
supported_tags = target_python.get_tags()
|
||||
supported_tags = target_python.get_sorted_tags()
|
||||
|
||||
return cls(
|
||||
project_name=project_name,
|
||||
|
|
|
@ -89,7 +89,7 @@ def distutils_scheme(
|
|||
# finalize_options(); we only want to override here if the user
|
||||
# has explicitly requested it hence going back to the config
|
||||
if "install_lib" in d.get_option_dict("install"):
|
||||
scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))
|
||||
scheme.update({"purelib": i.install_lib, "platlib": i.install_lib})
|
||||
|
||||
if running_under_virtualenv():
|
||||
if home:
|
||||
|
|
|
@ -9,7 +9,7 @@ from pip._internal.utils.misc import strtobool
|
|||
from .base import BaseDistribution, BaseEnvironment, FilesystemWheel, MemoryWheel, Wheel
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Protocol
|
||||
from typing import Literal, Protocol
|
||||
else:
|
||||
Protocol = object
|
||||
|
||||
|
@ -50,6 +50,7 @@ def _should_use_importlib_metadata() -> bool:
|
|||
|
||||
|
||||
class Backend(Protocol):
|
||||
NAME: 'Literal["importlib", "pkg_resources"]'
|
||||
Distribution: Type[BaseDistribution]
|
||||
Environment: Type[BaseEnvironment]
|
||||
|
||||
|
|
|
@ -24,7 +24,7 @@ from typing import (
|
|||
|
||||
from pip._vendor.packaging.requirements import Requirement
|
||||
from pip._vendor.packaging.specifiers import InvalidSpecifier, SpecifierSet
|
||||
from pip._vendor.packaging.utils import NormalizedName
|
||||
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
|
||||
from pip._vendor.packaging.version import LegacyVersion, Version
|
||||
|
||||
from pip._internal.exceptions import NoneMetadataError
|
||||
|
@ -37,7 +37,6 @@ from pip._internal.models.direct_url import (
|
|||
from pip._internal.utils.compat import stdlib_pkgs # TODO: Move definition here.
|
||||
from pip._internal.utils.egg_link import egg_link_path_from_sys_path
|
||||
from pip._internal.utils.misc import is_local, normalize_path
|
||||
from pip._internal.utils.packaging import safe_extra
|
||||
from pip._internal.utils.urls import url_to_path
|
||||
|
||||
from ._json import msg_to_json
|
||||
|
@ -460,6 +459,19 @@ class BaseDistribution(Protocol):
|
|||
|
||||
For modern .dist-info distributions, this is the collection of
|
||||
"Provides-Extra:" entries in distribution metadata.
|
||||
|
||||
The return value of this function is not particularly useful other than
|
||||
display purposes due to backward compatibility issues and the extra
|
||||
names being poorly normalized prior to PEP 685. If you want to perform
|
||||
logic operations on extras, use :func:`is_extra_provided` instead.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
def is_extra_provided(self, extra: str) -> bool:
|
||||
"""Check whether an extra is provided by this distribution.
|
||||
|
||||
This is needed mostly for compatibility issues with pkg_resources not
|
||||
following the extra normalization rules defined in PEP 685.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
|
@ -537,10 +549,11 @@ class BaseDistribution(Protocol):
|
|||
"""Get extras from the egg-info directory."""
|
||||
known_extras = {""}
|
||||
for entry in self._iter_requires_txt_entries():
|
||||
if entry.extra in known_extras:
|
||||
extra = canonicalize_name(entry.extra)
|
||||
if extra in known_extras:
|
||||
continue
|
||||
known_extras.add(entry.extra)
|
||||
yield entry.extra
|
||||
known_extras.add(extra)
|
||||
yield extra
|
||||
|
||||
def _iter_egg_info_dependencies(self) -> Iterable[str]:
|
||||
"""Get distribution dependencies from the egg-info directory.
|
||||
|
@ -556,10 +569,11 @@ class BaseDistribution(Protocol):
|
|||
all currently available PEP 517 backends, although not standardized.
|
||||
"""
|
||||
for entry in self._iter_requires_txt_entries():
|
||||
if entry.extra and entry.marker:
|
||||
marker = f'({entry.marker}) and extra == "{safe_extra(entry.extra)}"'
|
||||
elif entry.extra:
|
||||
marker = f'extra == "{safe_extra(entry.extra)}"'
|
||||
extra = canonicalize_name(entry.extra)
|
||||
if extra and entry.marker:
|
||||
marker = f'({entry.marker}) and extra == "{extra}"'
|
||||
elif extra:
|
||||
marker = f'extra == "{extra}"'
|
||||
elif entry.marker:
|
||||
marker = entry.marker
|
||||
else:
|
||||
|
|
|
@ -1,4 +1,6 @@
|
|||
from ._dists import Distribution
|
||||
from ._envs import Environment
|
||||
|
||||
__all__ = ["Distribution", "Environment"]
|
||||
__all__ = ["NAME", "Distribution", "Environment"]
|
||||
|
||||
NAME = "importlib"
|
||||
|
|
|
@ -27,7 +27,6 @@ from pip._internal.metadata.base import (
|
|||
Wheel,
|
||||
)
|
||||
from pip._internal.utils.misc import normalize_path
|
||||
from pip._internal.utils.packaging import safe_extra
|
||||
from pip._internal.utils.temp_dir import TempDirectory
|
||||
from pip._internal.utils.wheel import parse_wheel, read_wheel_metadata_file
|
||||
|
||||
|
@ -208,12 +207,16 @@ class Distribution(BaseDistribution):
|
|||
return cast(email.message.Message, self._dist.metadata)
|
||||
|
||||
def iter_provided_extras(self) -> Iterable[str]:
|
||||
return (
|
||||
safe_extra(extra) for extra in self.metadata.get_all("Provides-Extra", [])
|
||||
return self.metadata.get_all("Provides-Extra", [])
|
||||
|
||||
def is_extra_provided(self, extra: str) -> bool:
|
||||
return any(
|
||||
canonicalize_name(provided_extra) == canonicalize_name(extra)
|
||||
for provided_extra in self.metadata.get_all("Provides-Extra", [])
|
||||
)
|
||||
|
||||
def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
|
||||
contexts: Sequence[Dict[str, str]] = [{"extra": safe_extra(e)} for e in extras]
|
||||
contexts: Sequence[Dict[str, str]] = [{"extra": e} for e in extras]
|
||||
for req_string in self.metadata.get_all("Requires-Dist", []):
|
||||
req = Requirement(req_string)
|
||||
if not req.marker:
|
||||
|
|
|
@ -151,7 +151,7 @@ def _emit_egg_deprecation(location: Optional[str]) -> None:
|
|||
deprecated(
|
||||
reason=f"Loading egg at {location} is deprecated.",
|
||||
replacement="to use pip for package installation.",
|
||||
gone_in=None,
|
||||
gone_in="23.3",
|
||||
)
|
||||
|
||||
|
||||
|
@ -174,7 +174,7 @@ class Environment(BaseEnvironment):
|
|||
for location in self._paths:
|
||||
yield from finder.find(location)
|
||||
for dist in finder.find_eggs(location):
|
||||
# _emit_egg_deprecation(dist.location) # TODO: Enable this.
|
||||
_emit_egg_deprecation(dist.location)
|
||||
yield dist
|
||||
# This must go last because that's how pkg_resources tie-breaks.
|
||||
yield from finder.find_linked(location)
|
||||
|
|
|
@ -24,8 +24,12 @@ from .base import (
|
|||
Wheel,
|
||||
)
|
||||
|
||||
__all__ = ["NAME", "Distribution", "Environment"]
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
NAME = "pkg_resources"
|
||||
|
||||
|
||||
class EntryPoint(NamedTuple):
|
||||
name: str
|
||||
|
@ -212,12 +216,16 @@ class Distribution(BaseDistribution):
|
|||
|
||||
def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
|
||||
if extras: # pkg_resources raises on invalid extras, so we sanitize.
|
||||
extras = frozenset(extras).intersection(self._dist.extras)
|
||||
extras = frozenset(pkg_resources.safe_extra(e) for e in extras)
|
||||
extras = extras.intersection(self._dist.extras)
|
||||
return self._dist.requires(extras)
|
||||
|
||||
def iter_provided_extras(self) -> Iterable[str]:
|
||||
return self._dist.extras
|
||||
|
||||
def is_extra_provided(self, extra: str) -> bool:
|
||||
return pkg_resources.safe_extra(extra) in self._dist.extras
|
||||
|
||||
|
||||
class Environment(BaseEnvironment):
|
||||
def __init__(self, ws: pkg_resources.WorkingSet) -> None:
|
||||
|
|
|
@ -22,7 +22,10 @@ class InstallationReport:
|
|||
# is_direct is true if the requirement was a direct URL reference (which
|
||||
# includes editable requirements), and false if the requirement was
|
||||
# downloaded from a PEP 503 index or --find-links.
|
||||
"is_direct": bool(ireq.original_link),
|
||||
"is_direct": ireq.is_direct,
|
||||
# is_yanked is true if the requirement was yanked from the index, but
|
||||
# was still selected by pip to conform to PEP 592.
|
||||
"is_yanked": ireq.link.is_yanked if ireq.link else False,
|
||||
# requested is true if the requirement was specified by the user (aka
|
||||
# top level requirement), and false if it was installed as a dependency of a
|
||||
# requirement. https://peps.python.org/pep-0376/#requested
|
||||
|
@ -33,7 +36,7 @@ class InstallationReport:
|
|||
}
|
||||
if ireq.user_supplied and ireq.extras:
|
||||
# For top level requirements, the list of requested extras, if any.
|
||||
res["requested_extras"] = list(sorted(ireq.extras))
|
||||
res["requested_extras"] = sorted(ireq.extras)
|
||||
return res
|
||||
|
||||
def to_dict(self) -> Dict[str, Any]:
|
||||
|
|
|
@@ -69,18 +69,6 @@ class LinkHash:
    def __post_init__(self) -> None:
        assert self.name in _SUPPORTED_HASHES

    @classmethod
    def parse_pep658_hash(cls, dist_info_metadata: str) -> Optional["LinkHash"]:
        """Parse a PEP 658 data-dist-info-metadata hash."""
        if dist_info_metadata == "true":
            return None
        name, sep, value = dist_info_metadata.partition("=")
        if not sep:
            return None
        if name not in _SUPPORTED_HASHES:
            return None
        return cls(name=name, value=value)

    @classmethod
    @functools.lru_cache(maxsize=None)
    def find_hash_url_fragment(cls, url: str) -> Optional["LinkHash"]:

@@ -107,6 +95,28 @@ class LinkHash:
        return hashes.is_hash_allowed(self.name, hex_digest=self.value)


@dataclass(frozen=True)
class MetadataFile:
    """Information about a core metadata file associated with a distribution."""

    hashes: Optional[Dict[str, str]]

    def __post_init__(self) -> None:
        if self.hashes is not None:
            assert all(name in _SUPPORTED_HASHES for name in self.hashes)


def supported_hashes(hashes: Optional[Dict[str, str]]) -> Optional[Dict[str, str]]:
    # Remove any unsupported hash types from the mapping. If this leaves no
    # supported hashes, return None
    if hashes is None:
        return None
    hashes = {n: v for n, v in hashes.items() if n in _SUPPORTED_HASHES}
    if not hashes:
        return None
    return hashes


def _clean_url_path_part(part: str) -> str:
    """
    Clean a "part" of a URL path (i.e. after splitting on "@" characters).

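A small usage sketch of the two additions above, with made-up hash values:

supported_hashes({"sha256": "0a1b...", "md4": "ffff"})  # -> {"sha256": "0a1b..."}: md4 is dropped
supported_hashes({"md4": "ffff"})                       # -> None: nothing usable remains
supported_hashes(None)                                  # -> None

MetadataFile({"sha256": "0a1b..."})  # a metadata file exists and hashes were advertised
MetadataFile(None)                   # a metadata file exists, but no hashes were given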
@ -179,7 +189,7 @@ class Link(KeyBasedCompareMixin):
|
|||
"comes_from",
|
||||
"requires_python",
|
||||
"yanked_reason",
|
||||
"dist_info_metadata",
|
||||
"metadata_file_data",
|
||||
"cache_link_parsing",
|
||||
"egg_fragment",
|
||||
]
|
||||
|
@ -190,7 +200,7 @@ class Link(KeyBasedCompareMixin):
|
|||
comes_from: Optional[Union[str, "IndexContent"]] = None,
|
||||
requires_python: Optional[str] = None,
|
||||
yanked_reason: Optional[str] = None,
|
||||
dist_info_metadata: Optional[str] = None,
|
||||
metadata_file_data: Optional[MetadataFile] = None,
|
||||
cache_link_parsing: bool = True,
|
||||
hashes: Optional[Mapping[str, str]] = None,
|
||||
) -> None:
|
||||
|
@ -208,11 +218,10 @@ class Link(KeyBasedCompareMixin):
|
|||
a simple repository HTML link. If the file has been yanked but
|
||||
no reason was provided, this should be the empty string. See
|
||||
PEP 592 for more information and the specification.
|
||||
:param dist_info_metadata: the metadata attached to the file, or None if no such
|
||||
metadata is provided. This is the value of the "data-dist-info-metadata"
|
||||
attribute, if present, in a simple repository HTML link. This may be parsed
|
||||
into its own `Link` by `self.metadata_link()`. See PEP 658 for more
|
||||
information and the specification.
|
||||
:param metadata_file_data: the metadata attached to the file, or None if
|
||||
no such metadata is provided. This argument, if not None, indicates
|
||||
that a separate metadata file exists, and also optionally supplies
|
||||
hashes for that file.
|
||||
:param cache_link_parsing: A flag that is used elsewhere to determine
|
||||
whether resources retrieved from this link should be cached. PyPI
|
||||
URLs should generally have this set to False, for example.
|
||||
|
@ -220,6 +229,10 @@ class Link(KeyBasedCompareMixin):
|
|||
determine the validity of a download.
|
||||
"""
|
||||
|
||||
# The comes_from, requires_python, and metadata_file_data arguments are
|
||||
# only used by classmethods of this class, and are not used in client
|
||||
# code directly.
|
||||
|
||||
# url can be a UNC windows share
|
||||
if url.startswith("\\\\"):
|
||||
url = path_to_url(url)
|
||||
|
@ -239,7 +252,7 @@ class Link(KeyBasedCompareMixin):
|
|||
self.comes_from = comes_from
|
||||
self.requires_python = requires_python if requires_python else None
|
||||
self.yanked_reason = yanked_reason
|
||||
self.dist_info_metadata = dist_info_metadata
|
||||
self.metadata_file_data = metadata_file_data
|
||||
|
||||
super().__init__(key=url, defining_class=Link)
|
||||
|
||||
|
@ -262,9 +275,25 @@ class Link(KeyBasedCompareMixin):
|
|||
url = _ensure_quoted_url(urllib.parse.urljoin(page_url, file_url))
|
||||
pyrequire = file_data.get("requires-python")
|
||||
yanked_reason = file_data.get("yanked")
|
||||
dist_info_metadata = file_data.get("dist-info-metadata")
|
||||
hashes = file_data.get("hashes", {})
|
||||
|
||||
# PEP 714: Indexes must use the name core-metadata, but
|
||||
# clients should support the old name as a fallback for compatibility.
|
||||
metadata_info = file_data.get("core-metadata")
|
||||
if metadata_info is None:
|
||||
metadata_info = file_data.get("dist-info-metadata")
|
||||
|
||||
# The metadata info value may be a boolean, or a dict of hashes.
|
||||
if isinstance(metadata_info, dict):
|
||||
# The file exists, and hashes have been supplied
|
||||
metadata_file_data = MetadataFile(supported_hashes(metadata_info))
|
||||
elif metadata_info:
|
||||
# The file exists, but there are no hashes
|
||||
metadata_file_data = MetadataFile(None)
|
||||
else:
|
||||
# False or not present: the file does not exist
|
||||
metadata_file_data = None
|
||||
|
||||
# The Link.yanked_reason expects an empty string instead of a boolean.
|
||||
if yanked_reason and not isinstance(yanked_reason, str):
|
||||
yanked_reason = ""
|
||||
|
@ -278,7 +307,7 @@ class Link(KeyBasedCompareMixin):
|
|||
requires_python=pyrequire,
|
||||
yanked_reason=yanked_reason,
|
||||
hashes=hashes,
|
||||
dist_info_metadata=dist_info_metadata,
|
||||
metadata_file_data=metadata_file_data,
|
||||
)
|
||||
|
||||
@classmethod
|
||||
|
@ -298,14 +327,39 @@ class Link(KeyBasedCompareMixin):
|
|||
url = _ensure_quoted_url(urllib.parse.urljoin(base_url, href))
|
||||
pyrequire = anchor_attribs.get("data-requires-python")
|
||||
yanked_reason = anchor_attribs.get("data-yanked")
|
||||
dist_info_metadata = anchor_attribs.get("data-dist-info-metadata")
|
||||
|
||||
# PEP 714: Indexes must use the name data-core-metadata, but
|
||||
# clients should support the old name as a fallback for compatibility.
|
||||
metadata_info = anchor_attribs.get("data-core-metadata")
|
||||
if metadata_info is None:
|
||||
metadata_info = anchor_attribs.get("data-dist-info-metadata")
|
||||
# The metadata info value may be the string "true", or a string of
|
||||
# the form "hashname=hashval"
|
||||
if metadata_info == "true":
|
||||
# The file exists, but there are no hashes
|
||||
metadata_file_data = MetadataFile(None)
|
||||
elif metadata_info is None:
|
||||
# The file does not exist
|
||||
metadata_file_data = None
|
||||
else:
|
||||
# The file exists, and hashes have been supplied
|
||||
hashname, sep, hashval = metadata_info.partition("=")
|
||||
if sep == "=":
|
||||
metadata_file_data = MetadataFile(supported_hashes({hashname: hashval}))
|
||||
else:
|
||||
# Error - data is wrong. Treat as no hashes supplied.
|
||||
logger.debug(
|
||||
"Index returned invalid data-dist-info-metadata value: %s",
|
||||
metadata_info,
|
||||
)
|
||||
metadata_file_data = MetadataFile(None)
|
||||
|
||||
return cls(
|
||||
url,
|
||||
comes_from=page_url,
|
||||
requires_python=pyrequire,
|
||||
yanked_reason=yanked_reason,
|
||||
dist_info_metadata=dist_info_metadata,
|
||||
metadata_file_data=metadata_file_data,
|
||||
)
|
||||
|
||||
def __str__(self) -> str:
|
||||
|
@@ -407,17 +461,13 @@ class Link(KeyBasedCompareMixin):
        return match.group(1)

    def metadata_link(self) -> Optional["Link"]:
        """Implementation of PEP 658 parsing."""
        # Note that Link.from_element() parsing the "data-dist-info-metadata" attribute
        # from an HTML anchor tag is typically how the Link.dist_info_metadata attribute
        # gets set.
        if self.dist_info_metadata is None:
        """Return a link to the associated core metadata file (if any)."""
        if self.metadata_file_data is None:
            return None
        metadata_url = f"{self.url_without_fragment}.metadata"
        metadata_link_hash = LinkHash.parse_pep658_hash(self.dist_info_metadata)
        if metadata_link_hash is None:
        if self.metadata_file_data.hashes is None:
            return Link(metadata_url)
        return Link(metadata_url, hashes=metadata_link_hash.as_dict())
        return Link(metadata_url, hashes=self.metadata_file_data.hashes)

    def as_hashes(self) -> Hashes:
        return Hashes({k: [v] for k, v in self._hashes.items()})

@@ -1,5 +1,5 @@
import sys
from typing import List, Optional, Tuple
from typing import List, Optional, Set, Tuple

from pip._vendor.packaging.tags import Tag


@@ -22,6 +22,7 @@ class TargetPython:
        "py_version",
        "py_version_info",
        "_valid_tags",
        "_valid_tags_set",
    ]

    def __init__(

@@ -61,8 +62,9 @@ class TargetPython:
        self.py_version = py_version
        self.py_version_info = py_version_info

        # This is used to cache the return value of get_tags().
        # This is used to cache the return value of get_(un)sorted_tags.
        self._valid_tags: Optional[List[Tag]] = None
        self._valid_tags_set: Optional[Set[Tag]] = None

    def format_given(self) -> str:
        """

@@ -84,7 +86,7 @@ class TargetPython:
            f"{key}={value!r}" for key, value in key_values if value is not None
        )

    def get_tags(self) -> List[Tag]:
    def get_sorted_tags(self) -> List[Tag]:
        """
        Return the supported PEP 425 tags to check wheel candidates against.


@@ -108,3 +110,13 @@ class TargetPython:
            self._valid_tags = tags

        return self._valid_tags

    def get_unsorted_tags(self) -> Set[Tag]:
        """Exactly the same as get_sorted_tags, but returns a set.

        This is important for performance.
        """
        if self._valid_tags_set is None:
            self._valid_tags_set = set(self.get_sorted_tags())

        return self._valid_tags_set

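The split between the sorted list and the unsorted set matches how the two callers shown earlier use them: CandidateEvaluator needs the preference order, while LinkEvaluator only needs membership tests. A rough sketch of the filtering side, illustrative rather than pip's actual code:

# Sketch only: set membership makes the per-link check cheap.
from pip._internal.models.target_python import TargetPython

def supported_only(wheel_tag_sets, target_python: TargetPython):
    supported = target_python.get_unsorted_tags()  # Set[Tag], O(1) lookups per tag
    return [tags for tags in wheel_tag_sets if any(t in supported for t in tags)]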
@ -514,7 +514,9 @@ class MultiDomainBasicAuth(AuthBase):
|
|||
|
||||
# Consume content and release the original connection to allow our new
|
||||
# request to reuse the same one.
|
||||
resp.content
|
||||
# The result of the assignment isn't used, it's just needed to consume
|
||||
# the content.
|
||||
_ = resp.content
|
||||
resp.raw.release_conn()
|
||||
|
||||
# Add our new username and password to the request
|
||||
|
|
|
@ -3,10 +3,11 @@
|
|||
|
||||
import os
|
||||
from contextlib import contextmanager
|
||||
from typing import Generator, Optional
|
||||
from datetime import datetime
|
||||
from typing import BinaryIO, Generator, Optional, Union
|
||||
|
||||
from pip._vendor.cachecontrol.cache import BaseCache
|
||||
from pip._vendor.cachecontrol.caches import FileCache
|
||||
from pip._vendor.cachecontrol.cache import SeparateBodyBaseCache
|
||||
from pip._vendor.cachecontrol.caches import SeparateBodyFileCache
|
||||
from pip._vendor.requests.models import Response
|
||||
|
||||
from pip._internal.utils.filesystem import adjacent_tmp_file, replace
|
||||
|
@ -28,7 +29,7 @@ def suppressed_cache_errors() -> Generator[None, None, None]:
|
|||
pass
|
||||
|
||||
|
||||
class SafeFileCache(BaseCache):
|
||||
class SafeFileCache(SeparateBodyBaseCache):
|
||||
"""
|
||||
A file based cache which is safe to use even when the target directory may
|
||||
not be accessible or writable.
|
||||
|
@ -43,7 +44,7 @@ class SafeFileCache(BaseCache):
|
|||
# From cachecontrol.caches.file_cache.FileCache._fn, brought into our
|
||||
# class for backwards-compatibility and to avoid using a non-public
|
||||
# method.
|
||||
hashed = FileCache.encode(name)
|
||||
hashed = SeparateBodyFileCache.encode(name)
|
||||
parts = list(hashed[:5]) + [hashed]
|
||||
return os.path.join(self.directory, *parts)
|
||||
|
||||
|
@@ -53,17 +54,33 @@ class SafeFileCache(BaseCache):
            with open(path, "rb") as f:
                return f.read()

    def set(self, key: str, value: bytes, expires: Optional[int] = None) -> None:
        path = self._get_cache_path(key)
    def _write(self, path: str, data: bytes) -> None:
        with suppressed_cache_errors():
            ensure_dir(os.path.dirname(path))

            with adjacent_tmp_file(path) as f:
                f.write(value)
                f.write(data)

            replace(f.name, path)

    def set(
        self, key: str, value: bytes, expires: Union[int, datetime, None] = None
    ) -> None:
        path = self._get_cache_path(key)
        self._write(path, value)

    def delete(self, key: str) -> None:
        path = self._get_cache_path(key)
        with suppressed_cache_errors():
            os.remove(path)
        with suppressed_cache_errors():
            os.remove(path + ".body")

    def get_body(self, key: str) -> Optional[BinaryIO]:
        path = self._get_cache_path(key) + ".body"
        with suppressed_cache_errors():
            return open(path, "rb")

    def set_body(self, key: str, body: bytes) -> None:
        path = self._get_cache_path(key) + ".body"
        self._write(path, body)

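A quick sketch of the resulting on-disk layout and the calls made against the cache; the directory, URL key, and payloads are made up, and the constructor signature is assumed from the surrounding class:

# Illustrative only.
cache = SafeFileCache("/tmp/pip-http-cache")                       # hypothetical directory
cache.set("https://example.invalid/simple/foo/", b"<metadata>")    # written to <hashed-path>
cache.set_body("https://example.invalid/simple/foo/", b"<body>")   # written to <hashed-path>.body
cache.get_body("https://example.invalid/simple/foo/")              # open file, or None if any OSError
cache.delete("https://example.invalid/simple/foo/")                # removes both files, ignoring errors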
@ -419,15 +419,17 @@ class PipSession(requests.Session):
|
|||
msg += f" (from {source})"
|
||||
logger.info(msg)
|
||||
|
||||
host_port = parse_netloc(host)
|
||||
if host_port not in self.pip_trusted_origins:
|
||||
self.pip_trusted_origins.append(host_port)
|
||||
parsed_host, parsed_port = parse_netloc(host)
|
||||
if parsed_host is None:
|
||||
raise ValueError(f"Trusted host URL must include a host part: {host!r}")
|
||||
if (parsed_host, parsed_port) not in self.pip_trusted_origins:
|
||||
self.pip_trusted_origins.append((parsed_host, parsed_port))
|
||||
|
||||
self.mount(
|
||||
build_url_from_netloc(host, scheme="http") + "/", self._trusted_host_adapter
|
||||
)
|
||||
self.mount(build_url_from_netloc(host) + "/", self._trusted_host_adapter)
|
||||
if not host_port[1]:
|
||||
if not parsed_port:
|
||||
self.mount(
|
||||
build_url_from_netloc(host, scheme="http") + ":",
|
||||
self._trusted_host_adapter,
|
||||
|
|
|
@@ -51,10 +51,22 @@ def get_build_tracker() -> Generator["BuildTracker", None, None]:
        yield tracker


class TrackerId(str):
    """Uniquely identifying string provided to the build tracker."""


class BuildTracker:
    """Ensure that an sdist cannot request itself as a setup requirement.

    When an sdist is prepared, it identifies its setup requirements in the
    context of ``BuildTracker.track()``. If a requirement shows up recursively, this
    raises an exception.

    This stops fork bombs embedded in malicious packages."""

    def __init__(self, root: str) -> None:
        self._root = root
        self._entries: Set[InstallRequirement] = set()
        self._entries: Dict[TrackerId, InstallRequirement] = {}
        logger.debug("Created build tracker: %s", self._root)

    def __enter__(self) -> "BuildTracker":

@ -69,16 +81,15 @@ class BuildTracker:
|
|||
) -> None:
|
||||
self.cleanup()
|
||||
|
||||
def _entry_path(self, link: Link) -> str:
|
||||
hashed = hashlib.sha224(link.url_without_fragment.encode()).hexdigest()
|
||||
def _entry_path(self, key: TrackerId) -> str:
|
||||
hashed = hashlib.sha224(key.encode()).hexdigest()
|
||||
return os.path.join(self._root, hashed)
|
||||
|
||||
def add(self, req: InstallRequirement) -> None:
|
||||
def add(self, req: InstallRequirement, key: TrackerId) -> None:
|
||||
"""Add an InstallRequirement to build tracking."""
|
||||
|
||||
assert req.link
|
||||
# Get the file to write information about this requirement.
|
||||
entry_path = self._entry_path(req.link)
|
||||
entry_path = self._entry_path(key)
|
||||
|
||||
# Try reading from the file. If it exists and can be read from, a build
|
||||
# is already in progress, so a LookupError is raised.
|
||||
|
@ -92,33 +103,37 @@ class BuildTracker:
|
|||
raise LookupError(message)
|
||||
|
||||
# If we're here, req should really not be building already.
|
||||
assert req not in self._entries
|
||||
assert key not in self._entries
|
||||
|
||||
# Start tracking this requirement.
|
||||
with open(entry_path, "w", encoding="utf-8") as fp:
|
||||
fp.write(str(req))
|
||||
self._entries.add(req)
|
||||
self._entries[key] = req
|
||||
|
||||
logger.debug("Added %s to build tracker %r", req, self._root)
|
||||
|
||||
def remove(self, req: InstallRequirement) -> None:
|
||||
def remove(self, req: InstallRequirement, key: TrackerId) -> None:
|
||||
"""Remove an InstallRequirement from build tracking."""
|
||||
|
||||
assert req.link
|
||||
# Delete the created file and the corresponding entries.
|
||||
os.unlink(self._entry_path(req.link))
|
||||
self._entries.remove(req)
|
||||
# Delete the created file and the corresponding entry.
|
||||
os.unlink(self._entry_path(key))
|
||||
del self._entries[key]
|
||||
|
||||
logger.debug("Removed %s from build tracker %r", req, self._root)
|
||||
|
||||
    def cleanup(self) -> None:
        for req in set(self._entries):
            self.remove(req)
        for key, req in list(self._entries.items()):
            self.remove(req, key)

        logger.debug("Removed build tracker: %r", self._root)

    @contextlib.contextmanager
    def track(self, req: InstallRequirement) -> Generator[None, None, None]:
        self.add(req)
    def track(self, req: InstallRequirement, key: str) -> Generator[None, None, None]:
        """Ensure that `key` cannot install itself as a setup requirement.

        :raises LookupError: If `key` was already provided in a parent invocation of
        the context introduced by this method."""
        tracker_id = TrackerId(key)
        self.add(req, tracker_id)
        yield
        self.remove(req)
        self.remove(req, tracker_id)

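To see what the key-based API protects against, a toy sketch of the recursive case the tracker rejects; the requirement object and key URL are made up, and the nested track() raises LookupError when it is entered:

# Sketch only.
with get_build_tracker() as tracker:
    # req_a: an InstallRequirement for the sdist being built (assumed for illustration)
    with tracker.track(req_a, "https://example.invalid/foo-1.0.tar.gz"):
        # While foo is building, a buggy or malicious sdist could list itself as
        # its own setup requirement; tracking the same key again is refused:
        with tracker.track(req_a, "https://example.invalid/foo-1.0.tar.gz"):
            ...  # never reached -- add() raises LookupError on the duplicate key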
@ -5,12 +5,15 @@ import logging
|
|||
from typing import Callable, Dict, List, NamedTuple, Optional, Set, Tuple
|
||||
|
||||
from pip._vendor.packaging.requirements import Requirement
|
||||
from pip._vendor.packaging.specifiers import LegacySpecifier
|
||||
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
|
||||
from pip._vendor.packaging.version import LegacyVersion
|
||||
|
||||
from pip._internal.distributions import make_distribution_for_install_requirement
|
||||
from pip._internal.metadata import get_default_environment
|
||||
from pip._internal.metadata.base import DistributionVersion
|
||||
from pip._internal.req.req_install import InstallRequirement
|
||||
from pip._internal.utils.deprecation import deprecated
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
@ -57,6 +60,8 @@ def check_package_set(
|
|||
package name and returns a boolean.
|
||||
"""
|
||||
|
||||
warn_legacy_versions_and_specifiers(package_set)
|
||||
|
||||
missing = {}
|
||||
conflicting = {}
|
||||
|
||||
|
@ -147,3 +152,36 @@ def _create_whitelist(
|
|||
break
|
||||
|
||||
return packages_affected
|
||||
|
||||
|
||||
def warn_legacy_versions_and_specifiers(package_set: PackageSet) -> None:
|
||||
for project_name, package_details in package_set.items():
|
||||
if isinstance(package_details.version, LegacyVersion):
|
||||
deprecated(
|
||||
reason=(
|
||||
f"{project_name} {package_details.version} "
|
||||
f"has a non-standard version number."
|
||||
),
|
||||
replacement=(
|
||||
f"to upgrade to a newer version of {project_name} "
|
||||
f"or contact the author to suggest that they "
|
||||
f"release a version with a conforming version number"
|
||||
),
|
||||
issue=12063,
|
||||
gone_in="23.3",
|
||||
)
|
||||
for dep in package_details.dependencies:
|
||||
if any(isinstance(spec, LegacySpecifier) for spec in dep.specifier):
|
||||
deprecated(
|
||||
reason=(
|
||||
f"{project_name} {package_details.version} "
|
||||
f"has a non-standard dependency specifier {dep}."
|
||||
),
|
||||
replacement=(
|
||||
f"to upgrade to a newer version of {project_name} "
|
||||
f"or contact the author to suggest that they "
|
||||
f"release a version with a conforming dependency specifiers"
|
||||
),
|
||||
issue=12063,
|
||||
gone_in="23.3",
|
||||
)
|
||||
|
|
|
@ -267,9 +267,9 @@ def get_csv_rows_for_installed(
|
|||
path = _fs_to_record_path(f, lib_dir)
|
||||
digest, length = rehash(f)
|
||||
installed_rows.append((path, digest, length))
|
||||
for installed_record_path in installed.values():
|
||||
installed_rows.append((installed_record_path, "", ""))
|
||||
return installed_rows
|
||||
return installed_rows + [
|
||||
(installed_record_path, "", "") for installed_record_path in installed.values()
|
||||
]
|
||||
|
||||
|
||||
def get_console_script_specs(console: Dict[str, str]) -> List[str]:
|
||||
|
|
|
@ -4,10 +4,10 @@
|
|||
# The following comment should be removed at some point in the future.
|
||||
# mypy: strict-optional=False
|
||||
|
||||
import logging
|
||||
import mimetypes
|
||||
import os
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
from typing import Dict, Iterable, List, Optional
|
||||
|
||||
from pip._vendor.packaging.utils import canonicalize_name
|
||||
|
@ -21,7 +21,6 @@ from pip._internal.exceptions import (
|
|||
InstallationError,
|
||||
MetadataInconsistent,
|
||||
NetworkConnectionError,
|
||||
PreviousBuildDirError,
|
||||
VcsHashUnsupported,
|
||||
)
|
||||
from pip._internal.index.package_finder import PackageFinder
|
||||
|
@ -37,6 +36,7 @@ from pip._internal.network.lazy_wheel import (
|
|||
from pip._internal.network.session import PipSession
|
||||
from pip._internal.operations.build.build_tracker import BuildTracker
|
||||
from pip._internal.req.req_install import InstallRequirement
|
||||
from pip._internal.utils._log import getLogger
|
||||
from pip._internal.utils.direct_url_helpers import (
|
||||
direct_url_for_editable,
|
||||
direct_url_from_link,
|
||||
|
@ -47,13 +47,12 @@ from pip._internal.utils.misc import (
|
|||
display_path,
|
||||
hash_file,
|
||||
hide_url,
|
||||
is_installable_dir,
|
||||
)
|
||||
from pip._internal.utils.temp_dir import TempDirectory
|
||||
from pip._internal.utils.unpacking import unpack_file
|
||||
from pip._internal.vcs import vcs
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
logger = getLogger(__name__)
|
||||
|
||||
|
||||
def _get_prepared_distribution(
|
||||
|
@ -65,7 +64,9 @@ def _get_prepared_distribution(
|
|||
) -> BaseDistribution:
|
||||
"""Prepare a distribution for installation."""
|
||||
abstract_dist = make_distribution_for_install_requirement(req)
|
||||
with build_tracker.track(req):
|
||||
tracker_id = abstract_dist.build_tracker_id
|
||||
if tracker_id is not None:
|
||||
with build_tracker.track(req, tracker_id):
|
||||
abstract_dist.prepare_distribution_metadata(
|
||||
finder, build_isolation, check_build_deps
|
||||
)
|
||||
|
@ -226,6 +227,7 @@ class RequirementPreparer:
|
|||
use_user_site: bool,
|
||||
lazy_wheel: bool,
|
||||
verbosity: int,
|
||||
legacy_resolver: bool,
|
||||
) -> None:
|
||||
super().__init__()
|
||||
|
||||
|
@ -259,6 +261,9 @@ class RequirementPreparer:
|
|||
# How verbose should underlying tooling be?
|
||||
self.verbosity = verbosity
|
||||
|
||||
# Are we using the legacy resolver?
|
||||
self.legacy_resolver = legacy_resolver
|
||||
|
||||
# Memoized downloaded files, as mapping of url: path.
|
||||
self._downloaded: Dict[str, str] = {}
|
||||
|
||||
|
@ -313,21 +318,7 @@ class RequirementPreparer:
|
|||
autodelete=True,
|
||||
parallel_builds=parallel_builds,
|
||||
)
|
||||
|
||||
# If a checkout exists, it's unwise to keep going. version
|
||||
# inconsistencies are logged later, but do not fail the
|
||||
# installation.
|
||||
# FIXME: this won't upgrade when there's an existing
|
||||
# package unpacked in `req.source_dir`
|
||||
# TODO: this check is now probably dead code
|
||||
if is_installable_dir(req.source_dir):
|
||||
raise PreviousBuildDirError(
|
||||
"pip can't proceed with requirements '{}' due to a"
|
||||
"pre-existing build directory ({}). This is likely "
|
||||
"due to a previous installation that failed . pip is "
|
||||
"being responsible and not assuming it can delete this. "
|
||||
"Please delete it and try again.".format(req, req.source_dir)
|
||||
)
|
||||
req.ensure_pristine_source_checkout()
|
||||
|
||||
def _get_linked_req_hashes(self, req: InstallRequirement) -> Hashes:
|
||||
# By the time this is called, the requirement's link should have
|
||||
|
@ -352,7 +343,7 @@ class RequirementPreparer:
|
|||
# a surprising hash mismatch in the future.
|
||||
# file:/// URLs aren't pinnable, so don't complain about them
|
||||
# not being pinned.
|
||||
if req.original_link is None and not req.is_pinned:
|
||||
if not req.is_direct and not req.is_pinned:
|
||||
raise HashUnpinned()
|
||||
|
||||
# If known-good hashes are missing for this requirement,
|
||||
|
@ -365,6 +356,11 @@ class RequirementPreparer:
|
|||
self,
|
||||
req: InstallRequirement,
|
||||
) -> Optional[BaseDistribution]:
|
||||
if self.legacy_resolver:
|
||||
logger.debug(
|
||||
"Metadata-only fetching is not used in the legacy resolver",
|
||||
)
|
||||
return None
|
||||
if self.require_hashes:
|
||||
logger.debug(
|
||||
"Metadata-only fetching is not used as hash checking is required",
|
||||
|
@ -385,7 +381,7 @@ class RequirementPreparer:
|
|||
if metadata_link is None:
|
||||
return None
|
||||
assert req.req is not None
|
||||
logger.info(
|
||||
logger.verbose(
|
||||
"Obtaining dependency information for %s from %s",
|
||||
req.req,
|
||||
metadata_link,
|
||||
|
@ -410,7 +406,7 @@ class RequirementPreparer:
|
|||
# NB: raw_name will fall back to the name from the install requirement if
|
||||
# the Name: field is not present, but it's noted in the raw_name docstring
|
||||
# that that should NEVER happen anyway.
|
||||
if metadata_dist.raw_name != req.req.name:
|
||||
if canonicalize_name(metadata_dist.raw_name) != canonicalize_name(req.req.name):
|
||||
raise MetadataInconsistent(
|
||||
req, "Name", req.req.name, metadata_dist.raw_name
|
||||
)
|
||||
|
@ -470,7 +466,19 @@ class RequirementPreparer:
|
|||
for link, (filepath, _) in batch_download:
|
||||
logger.debug("Downloading link %s to %s", link, filepath)
|
||||
req = links_to_fully_download[link]
|
||||
# Record the downloaded file path so wheel reqs can extract a Distribution
|
||||
# in .get_dist().
|
||||
req.local_file_path = filepath
|
||||
# Record that the file is downloaded so we don't do it again in
|
||||
# _prepare_linked_requirement().
|
||||
self._downloaded[req.link.url] = filepath
|
||||
|
||||
# If this is an sdist, we need to unpack it after downloading, but the
|
||||
# .source_dir won't be set up until we are in _prepare_linked_requirement().
|
||||
# Add the downloaded archive to the install requirement to unpack after
|
||||
# preparing the source dir.
|
||||
if not req.is_wheel:
|
||||
req.needs_unpacked_archive(Path(filepath))
|
||||
|
||||
# This step is necessary to ensure all lazy wheels are processed
|
||||
# successfully by the 'download', 'wheel', and 'install' commands.
|
||||
|
|
|
@ -1,6 +1,3 @@
|
|||
# The following comment should be removed at some point in the future.
|
||||
# mypy: strict-optional=False
|
||||
|
||||
import functools
|
||||
import logging
|
||||
import os
|
||||
|
@ -9,6 +6,7 @@ import sys
|
|||
import uuid
|
||||
import zipfile
|
||||
from optparse import Values
|
||||
from pathlib import Path
|
||||
from typing import Any, Collection, Dict, Iterable, List, Optional, Sequence, Union
|
||||
|
||||
from pip._vendor.packaging.markers import Marker
|
||||
|
@ -20,7 +18,7 @@ from pip._vendor.packaging.version import parse as parse_version
|
|||
from pip._vendor.pyproject_hooks import BuildBackendHookCaller
|
||||
|
||||
from pip._internal.build_env import BuildEnvironment, NoOpBuildEnvironment
|
||||
from pip._internal.exceptions import InstallationError
|
||||
from pip._internal.exceptions import InstallationError, PreviousBuildDirError
|
||||
from pip._internal.locations import get_scheme
|
||||
from pip._internal.metadata import (
|
||||
BaseDistribution,
|
||||
|
@ -50,11 +48,13 @@ from pip._internal.utils.misc import (
|
|||
backup_dir,
|
||||
display_path,
|
||||
hide_url,
|
||||
is_installable_dir,
|
||||
redact_auth_from_url,
|
||||
)
|
||||
from pip._internal.utils.packaging import safe_extra
|
||||
from pip._internal.utils.subprocess import runner_with_spinner_message
|
||||
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
|
||||
from pip._internal.utils.unpacking import unpack_file
|
||||
from pip._internal.utils.virtualenv import running_under_virtualenv
|
||||
from pip._internal.vcs import vcs
|
||||
|
||||
|
@ -104,6 +104,8 @@ class InstallRequirement:
|
|||
if link.is_file:
|
||||
self.source_dir = os.path.normpath(os.path.abspath(link.file_path))
|
||||
|
||||
# original_link is the direct URL that was provided by the user for the
|
||||
# requirement, either directly or via a constraints file.
|
||||
if link is None and req and req.url:
|
||||
# PEP 508 URL requirement
|
||||
link = Link(req.url)
|
||||
|
@ -126,7 +128,7 @@ class InstallRequirement:
|
|||
if extras:
|
||||
self.extras = extras
|
||||
elif req:
|
||||
self.extras = {safe_extra(extra) for extra in req.extras}
|
||||
self.extras = req.extras
|
||||
else:
|
||||
self.extras = set()
|
||||
if markers is None and req:
|
||||
|
@ -181,6 +183,9 @@ class InstallRequirement:
|
|||
# This requirement needs more preparation before it can be built
|
||||
self.needs_more_preparation = False
|
||||
|
||||
# This requirement needs to be unpacked before it can be installed.
|
||||
self._archive_source: Optional[Path] = None
|
||||
|
||||
def __str__(self) -> str:
|
||||
if self.req:
|
||||
s = str(self.req)
|
||||
|
@ -242,15 +247,22 @@ class InstallRequirement:
|
|||
|
||||
@property
|
||||
def specifier(self) -> SpecifierSet:
|
||||
assert self.req is not None
|
||||
return self.req.specifier
|
||||
|
||||
@property
|
||||
def is_direct(self) -> bool:
|
||||
"""Whether this requirement was specified as a direct URL."""
|
||||
return self.original_link is not None
|
||||
|
||||
@property
|
||||
def is_pinned(self) -> bool:
|
||||
"""Return whether I am pinned to an exact version.
|
||||
|
||||
For example, some-package==1.2 is pinned; some-package>1.2 is not.
|
||||
"""
|
||||
specifiers = self.specifier
|
||||
assert self.req is not None
|
||||
specifiers = self.req.specifier
|
||||
return len(specifiers) == 1 and next(iter(specifiers)).operator in {"==", "==="}
|
||||
|
||||
def match_markers(self, extras_requested: Optional[Iterable[str]] = None) -> bool:
|
||||
|
@ -260,7 +272,12 @@ class InstallRequirement:
|
|||
extras_requested = ("",)
|
||||
if self.markers is not None:
|
||||
return any(
|
||||
self.markers.evaluate({"extra": extra}) for extra in extras_requested
|
||||
self.markers.evaluate({"extra": extra})
|
||||
# TODO: Remove these two variants when packaging is upgraded to
|
||||
# support the marker comparison logic specified in PEP 685.
|
||||
or self.markers.evaluate({"extra": safe_extra(extra)})
|
||||
or self.markers.evaluate({"extra": canonicalize_name(extra)})
|
||||
for extra in extras_requested
|
||||
)
|
||||
else:
|
||||
return True
|
||||
|
@ -293,11 +310,12 @@ class InstallRequirement:
|
|||
good_hashes = self.hash_options.copy()
|
||||
if trust_internet:
|
||||
link = self.link
|
||||
elif self.original_link and self.user_supplied:
|
||||
elif self.is_direct and self.user_supplied:
|
||||
link = self.original_link
|
||||
else:
|
||||
link = None
|
||||
if link and link.hash:
|
||||
assert link.hash_name is not None
|
||||
good_hashes.setdefault(link.hash_name, []).append(link.hash)
|
||||
return Hashes(good_hashes)
|
||||
|
||||
|
@ -307,6 +325,7 @@ class InstallRequirement:
|
|||
return None
|
||||
s = str(self.req)
|
||||
if self.comes_from:
|
||||
comes_from: Optional[str]
|
||||
if isinstance(self.comes_from, str):
|
||||
comes_from = self.comes_from
|
||||
else:
|
||||
|
@ -338,7 +357,7 @@ class InstallRequirement:
|
|||
|
||||
# When parallel builds are enabled, add a UUID to the build directory
|
||||
# name so multiple builds do not interfere with each other.
|
||||
dir_name: str = canonicalize_name(self.name)
|
||||
dir_name: str = canonicalize_name(self.req.name)
|
||||
if parallel_builds:
|
||||
dir_name = f"{dir_name}_{uuid.uuid4().hex}"
|
||||
|
||||
|
@ -381,6 +400,7 @@ class InstallRequirement:
|
|||
)
|
||||
|
||||
def warn_on_mismatching_name(self) -> None:
|
||||
assert self.req is not None
|
||||
metadata_name = canonicalize_name(self.metadata["Name"])
|
||||
if canonicalize_name(self.req.name) == metadata_name:
|
||||
# Everything is fine.
|
||||
|
@ -450,6 +470,7 @@ class InstallRequirement:
|
|||
# Things valid for sdists
|
||||
@property
|
||||
def unpacked_source_directory(self) -> str:
|
||||
assert self.source_dir, f"No source dir for {self}"
|
||||
return os.path.join(
|
||||
self.source_dir, self.link and self.link.subdirectory_fragment or ""
|
||||
)
|
||||
|
@ -493,7 +514,7 @@ class InstallRequirement:
|
|||
"to use --use-pep517 or add a "
|
||||
"pyproject.toml file to the project"
|
||||
),
|
||||
gone_in="23.3",
|
||||
gone_in="24.0",
|
||||
)
|
||||
self.use_pep517 = False
|
||||
return
|
||||
|
@ -536,7 +557,7 @@ class InstallRequirement:
|
|||
Under PEP 517 and PEP 660, call the backend hook to prepare the metadata.
|
||||
Under legacy processing, call setup.py egg-info.
|
||||
"""
|
||||
assert self.source_dir
|
||||
assert self.source_dir, f"No source dir for {self}"
|
||||
details = self.name or f"from {self.link}"
|
||||
|
||||
if self.use_pep517:
|
||||
|
@ -585,8 +606,10 @@ class InstallRequirement:
|
|||
if self.metadata_directory:
|
||||
return get_directory_distribution(self.metadata_directory)
|
||||
elif self.local_file_path and self.is_wheel:
|
||||
assert self.req is not None
|
||||
return get_wheel_distribution(
|
||||
FilesystemWheel(self.local_file_path), canonicalize_name(self.name)
|
||||
FilesystemWheel(self.local_file_path),
|
||||
canonicalize_name(self.req.name),
|
||||
)
|
||||
raise AssertionError(
|
||||
f"InstallRequirement {self} has no metadata directory and no wheel: "
|
||||
|
@ -594,9 +617,9 @@ class InstallRequirement:
|
|||
)
|
||||
|
||||
def assert_source_matches_version(self) -> None:
|
||||
assert self.source_dir
|
||||
assert self.source_dir, f"No source dir for {self}"
|
||||
version = self.metadata["version"]
|
||||
if self.req.specifier and version not in self.req.specifier:
|
||||
if self.req and self.req.specifier and version not in self.req.specifier:
|
||||
logger.warning(
|
||||
"Requested %s, but installing version %s",
|
||||
self,
|
||||
|
@ -633,6 +656,27 @@ class InstallRequirement:
|
|||
parallel_builds=parallel_builds,
|
||||
)
|
||||
|
||||
def needs_unpacked_archive(self, archive_source: Path) -> None:
|
||||
assert self._archive_source is None
|
||||
self._archive_source = archive_source
|
||||
|
||||
def ensure_pristine_source_checkout(self) -> None:
|
||||
"""Ensure the source directory has not yet been built in."""
|
||||
assert self.source_dir is not None
|
||||
if self._archive_source is not None:
|
||||
unpack_file(str(self._archive_source), self.source_dir)
|
||||
elif is_installable_dir(self.source_dir):
|
||||
# If a checkout exists, it's unwise to keep going.
|
||||
# version inconsistencies are logged later, but do not fail
|
||||
# the installation.
|
||||
raise PreviousBuildDirError(
|
||||
f"pip can't proceed with requirements '{self}' due to a "
|
||||
f"pre-existing build directory ({self.source_dir}). This is likely "
|
||||
"due to a previous installation that failed . pip is "
|
||||
"being responsible and not assuming it can delete this. "
|
||||
"Please delete it and try again."
|
||||
)
|
||||
|
||||
# For editable installations
|
||||
def update_editable(self) -> None:
|
||||
if not self.link:
|
||||
|
@ -689,9 +733,10 @@ class InstallRequirement:
|
|||
name = name.replace(os.path.sep, "/")
|
||||
return name
|
||||
|
||||
assert self.req is not None
|
||||
path = os.path.join(parentdir, path)
|
||||
name = _clean_zip_name(path, rootdir)
|
||||
return self.name + "/" + name
|
||||
return self.req.name + "/" + name
|
||||
|
||||
def archive(self, build_dir: Optional[str]) -> None:
|
||||
"""Saves archive to provided build_dir.
|
||||
|
@ -770,8 +815,9 @@ class InstallRequirement:
|
|||
use_user_site: bool = False,
|
||||
pycompile: bool = True,
|
||||
) -> None:
|
||||
assert self.req is not None
|
||||
scheme = get_scheme(
|
||||
self.name,
|
||||
self.req.name,
|
||||
user=use_user_site,
|
||||
home=home,
|
||||
root=root,
|
||||
|
@ -785,7 +831,7 @@ class InstallRequirement:
|
|||
prefix=prefix,
|
||||
home=home,
|
||||
use_user_site=use_user_site,
|
||||
name=self.name,
|
||||
name=self.req.name,
|
||||
setup_py_path=self.setup_py_path,
|
||||
isolated=self.isolated,
|
||||
build_env=self.build_env,
|
||||
|
@ -798,13 +844,13 @@ class InstallRequirement:
|
|||
assert self.local_file_path
|
||||
|
||||
install_wheel(
|
||||
self.name,
|
||||
self.req.name,
|
||||
self.local_file_path,
|
||||
scheme=scheme,
|
||||
req_description=str(self.req),
|
||||
pycompile=pycompile,
|
||||
warn_script_location=warn_script_location,
|
||||
direct_url=self.download_info if self.original_link else None,
|
||||
direct_url=self.download_info if self.is_direct else None,
|
||||
requested=self.user_supplied,
|
||||
)
|
||||
self.install_succeeded = True
|
||||
|
@ -858,7 +904,7 @@ def check_legacy_setup_py_options(
|
|||
reason="--build-option and --global-option are deprecated.",
|
||||
issue=11859,
|
||||
replacement="to use --config-settings",
|
||||
gone_in="23.3",
|
||||
gone_in="24.0",
|
||||
)
|
||||
logger.warning(
|
||||
"Implying --no-binary=:all: due to the presence of "
|
||||
|
|
|
@ -2,9 +2,12 @@ import logging
|
|||
from collections import OrderedDict
|
||||
from typing import Dict, List
|
||||
|
||||
from pip._vendor.packaging.specifiers import LegacySpecifier
|
||||
from pip._vendor.packaging.utils import canonicalize_name
|
||||
from pip._vendor.packaging.version import LegacyVersion
|
||||
|
||||
from pip._internal.req.req_install import InstallRequirement
|
||||
from pip._internal.utils.deprecation import deprecated
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
@ -80,3 +83,37 @@ class RequirementSet:
|
|||
for install_req in self.all_requirements
|
||||
if not install_req.constraint and not install_req.satisfied_by
|
||||
]
|
||||
|
||||
def warn_legacy_versions_and_specifiers(self) -> None:
|
||||
for req in self.requirements_to_install:
|
||||
version = req.get_dist().version
|
||||
if isinstance(version, LegacyVersion):
|
||||
deprecated(
|
||||
reason=(
|
||||
f"pip has selected the non standard version {version} "
|
||||
f"of {req}. In the future this version will be "
|
||||
f"ignored as it isn't standard compliant."
|
||||
),
|
||||
replacement=(
|
||||
"set or update constraints to select another version "
|
||||
"or contact the package author to fix the version number"
|
||||
),
|
||||
issue=12063,
|
||||
gone_in="23.3",
|
||||
)
|
||||
for dep in req.get_dist().iter_dependencies():
|
||||
if any(isinstance(spec, LegacySpecifier) for spec in dep.specifier):
|
||||
deprecated(
|
||||
reason=(
|
||||
f"pip has selected {req} {version} which has non "
|
||||
f"standard dependency specifier {dep}. "
|
||||
f"In the future this version of {req} will be "
|
||||
f"ignored as it isn't standard compliant."
|
||||
),
|
||||
replacement=(
|
||||
"set or update constraints to select another version "
|
||||
"or contact the package author to fix the version number"
|
||||
),
|
||||
issue=12063,
|
||||
gone_in="23.3",
|
||||
)
|
||||
|
|
|
@ -274,7 +274,7 @@ class StashedUninstallPathSet:
|
|||
|
||||
def commit(self) -> None:
|
||||
"""Commits the uninstall by removing stashed files."""
|
||||
for _, save_dir in self._save_dirs.items():
|
||||
for save_dir in self._save_dirs.values():
|
||||
save_dir.cleanup()
|
||||
self._moves = []
|
||||
self._save_dirs = {}
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
from typing import FrozenSet, Iterable, Optional, Tuple, Union
|
||||
|
||||
from pip._vendor.packaging.specifiers import SpecifierSet
|
||||
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
|
||||
from pip._vendor.packaging.utils import NormalizedName
|
||||
from pip._vendor.packaging.version import LegacyVersion, Version
|
||||
|
||||
from pip._internal.models.link import Link, links_equivalent
|
||||
|
@@ -12,11 +12,11 @@ CandidateLookup = Tuple[Optional["Candidate"], Optional[InstallRequirement]]
 CandidateVersion = Union[LegacyVersion, Version]
 
 
-def format_name(project: str, extras: FrozenSet[str]) -> str:
+def format_name(project: NormalizedName, extras: FrozenSet[NormalizedName]) -> str:
     if not extras:
         return project
-    canonical_extras = sorted(canonicalize_name(e) for e in extras)
-    return "{}[{}]".format(project, ",".join(canonical_extras))
+    extras_expr = ",".join(sorted(extras))
+    return f"{project}[{extras_expr}]"
 
 
 class Constraint:
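
A minimal usage sketch (not part of the diff), assuming callers now pass names that are already normalized, for example via canonicalize_name at the call site:

from pip._vendor.packaging.utils import canonicalize_name

project = canonicalize_name("Foo_Bar")                              # "foo-bar"
extras = frozenset(canonicalize_name(e) for e in ("SSL", "Tests"))  # {"ssl", "tests"}
print(format_name(project, extras))                                 # "foo-bar[ssl,tests]"
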
@@ -341,6 +341,7 @@ class AlreadyInstalledCandidate(Candidate):
         self.dist = dist
         self._ireq = _make_install_req_from_dist(dist, template)
         self._factory = factory
+        self._version = None
 
         # This is just logging some messages, so we can do it eagerly.
         # The returned dist would be exactly the same as self.dist because we
@@ -376,7 +377,9 @@ class AlreadyInstalledCandidate(Candidate):
 
     @property
     def version(self) -> CandidateVersion:
-        return self.dist.version
+        if self._version is None:
+            self._version = self.dist.version
+        return self._version
 
     @property
     def is_editable(self) -> bool:
@@ -426,7 +429,15 @@ class ExtrasCandidate(Candidate):
         extras: FrozenSet[str],
     ) -> None:
         self.base = base
-        self.extras = extras
+        self.extras = frozenset(canonicalize_name(e) for e in extras)
+        # If any extras are requested in their non-normalized forms, keep track
+        # of their raw values. This is needed when we look up dependencies
+        # since PEP 685 has not been implemented for marker-matching, and using
+        # the non-normalized extra for lookup ensures the user can select a
+        # non-normalized extra in a package with its non-normalized form.
+        # TODO: Remove this attribute when packaging is upgraded to support the
+        # marker comparison logic specified in PEP 685.
+        self._unnormalized_extras = extras.difference(self.extras)
 
     def __str__(self) -> str:
         name, rest = str(self.base).split(" ", 1)
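
Roughly, the normalization in __init__ behaves like the following sketch (the extra names are illustrative only):

from pip._vendor.packaging.utils import canonicalize_name

requested = frozenset({"PDF_Support", "tests"})
normalized = frozenset(canonicalize_name(e) for e in requested)  # {"pdf-support", "tests"}
unnormalized = requested.difference(normalized)                  # {"PDF_Support"}
# The raw spellings are kept so that pre-PEP 685 marker matching can still
# see an extra exactly as the user wrote it.
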
@@ -477,6 +488,50 @@ class ExtrasCandidate(Candidate):
     def source_link(self) -> Optional[Link]:
         return self.base.source_link
 
+    def _warn_invalid_extras(
+        self,
+        requested: FrozenSet[str],
+        valid: FrozenSet[str],
+    ) -> None:
+        """Emit warnings for invalid extras being requested.
+
+        This emits a warning for each requested extra that is not in the
+        candidate's ``Provides-Extra`` list.
+        """
+        invalid_extras_to_warn = frozenset(
+            extra
+            for extra in requested
+            if extra not in valid
+            # If an extra is requested in an unnormalized form, skip warning
+            # about the normalized form being missing.
+            and extra in self.extras
+        )
+        if not invalid_extras_to_warn:
+            return
+        for extra in sorted(invalid_extras_to_warn):
+            logger.warning(
+                "%s %s does not provide the extra '%s'",
+                self.base.name,
+                self.version,
+                extra,
+            )
+
+    def _calculate_valid_requested_extras(self) -> FrozenSet[str]:
+        """Get a list of valid extras requested by this candidate.
+
+        The user (or upstream dependant) may have specified extras that the
+        candidate doesn't support. Any unsupported extras are dropped, and each
+        cause a warning to be logged here.
+        """
+        requested_extras = self.extras.union(self._unnormalized_extras)
+        valid_extras = frozenset(
+            extra
+            for extra in requested_extras
+            if self.base.dist.is_extra_provided(extra)
+        )
+        self._warn_invalid_extras(requested_extras, valid_extras)
+        return valid_extras
+
     def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
         factory = self.base._factory
 
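
The interaction of the two new helpers can be sketched as follows; FakeDist is a hypothetical stand-in for the candidate's distribution metadata and is not part of pip:

class FakeDist:
    def is_extra_provided(self, extra: str) -> bool:
        return extra in {"ssl", "tests"}

dist = FakeDist()
requested = {"ssl", "docs"}  # normalized and unnormalized forms merged
valid = frozenset(e for e in requested if dist.is_extra_provided(e))  # frozenset({"ssl"})
# "docs" is not provided, so it would be reported via
# logger.warning("%s %s does not provide the extra '%s'", name, version, "docs")
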
@@ -486,18 +541,7 @@ class ExtrasCandidate(Candidate):
         if not with_requires:
             return
 
-        # The user may have specified extras that the candidate doesn't
-        # support. We ignore any unsupported extras here.
-        valid_extras = self.extras.intersection(self.base.dist.iter_provided_extras())
-        invalid_extras = self.extras.difference(self.base.dist.iter_provided_extras())
-        for extra in sorted(invalid_extras):
-            logger.warning(
-                "%s %s does not provide the extra '%s'",
-                self.base.name,
-                self.version,
-                extra,
-            )
-
+        valid_extras = self._calculate_valid_requested_extras()
         for r in self.base.dist.iter_dependencies(valid_extras):
             requirement = factory.make_requirement_from_spec(
                 str(r), self.base._ireq, valid_extras
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue