1
1
Fork 0
mirror of https://github.com/pypa/pip synced 2023-12-13 21:30:23 +01:00

Merge branch 'master' into add-3.8-travis-ci

This commit is contained in:
Hugo van Kemenade 2019-11-26 08:13:47 +02:00 committed by GitHub
commit 229981192d
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
178 changed files with 5641 additions and 3024 deletions

View file

@ -3,8 +3,6 @@ environment:
# Unit and integration tests.
- PYTHON: "C:\\Python27-x64"
RUN_INTEGRATION_TESTS: "True"
- PYTHON: "C:\\Python35-x64"
RUN_INTEGRATION_TESTS: "True"
- PYTHON: "C:\\Python36-x64"
RUN_INTEGRATION_TESTS: "True"
# Unit tests only.
@ -64,8 +62,8 @@ test_script:
subst T: $env:TEMP
$env:TEMP = "T:\"
$env:TMP = "T:\"
tox -e py -- -m unit
tox -e py -- -m unit -n auto
if ($LastExitCode -eq 0 -and $env:RUN_INTEGRATION_TESTS -eq "True") {
tox -e py -- --use-venv -m integration -n2 --durations=20
tox -e py -- --use-venv -m integration -n auto --durations=20
}
}

View file

@ -15,20 +15,19 @@ jobs:
inputs:
versionSpec: '3'
- bash: pip install twine nox setuptools wheel
- bash: |
git config --global user.email "pypa-dev@googlegroups.com"
git config --global user.name "pip"
displayName: Setup Git credentials
- bash: pip install nox
displayName: Install dependencies
- bash: nox -s generate_authors
displayName: Generate AUTHORS.txt
- bash: nox -s prepare-release -- 99.9
displayName: Prepare dummy release
- bash: nox -s generate_news -- --yes
displayName: Generate NEWS.rst
- bash: python setup.py sdist bdist_wheel
displayName: Create sdist and wheel
- bash: twine check dist/*
displayName: Check distributions with twine
- bash: nox -s build-release -- 99.9
displayName: Generate distributions for the dummy release
- task: PublishBuildArtifacts@1
displayName: 'Publish Artifact: dist'

View file

@ -12,10 +12,13 @@ jobs:
Python27-x86:
python.version: '2.7'
python.architecture: x86
Python35-x64:
python.version: '3.5'
python.architecture: x64
Python37-x64:
python.version: '3.7'
python.architecture: x64
maxParallel: 2
maxParallel: 3
steps:
- template: ../steps/run-tests-windows.yml

View file

@ -0,0 +1,74 @@
# New-RAMDisk.ps1 — creates a RAM-backed temporary drive on Windows.
# It does so by exposing an iSCSI virtual disk (whose path uses the
# "ramdisk:" prefix, which requests RAM-backed storage) through the local
# iSCSI Target Server, connecting to it with the local iSCSI initiator,
# and formatting the resulting disk as an NTFS volume labelled "Temp".
# Requires elevation and a Windows edition with FS-iSCSITarget-Server.
[CmdletBinding()]
param(
# Drive letter the formatted volume will be mounted at (e.g. "R").
[Parameter(Mandatory=$true,
HelpMessage="Drive letter to use for the RAMDisk")]
[String]$drive,
# Disk size; defaults to 1 GB unless the caller overrides it.
[Parameter(HelpMessage="Size to allocate to the RAMDisk")]
[UInt64]$size=1GB
)
# Fail fast on any error and catch references to uninitialized variables.
$ErrorActionPreference = "Stop"
Set-StrictMode -Version Latest
Write-Output "Installing FS-iSCSITarget-Server"
Install-WindowsFeature -Name FS-iSCSITarget-Server
Write-Output "Starting MSiSCSI"
Start-Service MSiSCSI
# Poll up to 10 times, 500 ms apart, for the MSiSCSI initiator service
# to reach the "Running" state before continuing.
$retry = 10
do {
$service = Get-Service MSiSCSI
if ($service.Status -eq "Running") {
break;
}
$retry--
Start-Sleep -Milliseconds 500
} until ($retry -eq 0)
# Give up with a hard error if the service never came up.
$service = Get-Service MSiSCSI
if ($service.Status -ne "Running") {
throw "MSiSCSI is not running"
}
Write-Output "Configuring Firewall"
# Open the firewall rules associated with the MSiSCSI service.
Get-NetFirewallServiceFilter -Service MSiSCSI | Enable-NetFirewallRule
Write-Output "Configuring RAMDisk"
# Must use external-facing IP address, otherwise New-IscsiTargetPortal is
# unable to connect.
$ip = (
Get-NetIPAddress -AddressFamily IPv4 |
Where-Object {$_.IPAddress -ne "127.0.0.1"}
)[0].IPAddress
# Create the "ramdisks" iSCSI target only if it does not already exist,
# restricting access to the initiator at this machine's own IP address.
if (
-not (Get-IscsiServerTarget -ComputerName localhost | Where-Object {$_.TargetName -eq "ramdisks"})
) {
New-IscsiServerTarget `
-ComputerName localhost `
-TargetName ramdisks `
-InitiatorId IPAddress:$ip
}
# The "ramdisk:" path prefix asks the iSCSI target server for a
# RAM-backed virtual disk rather than a file-backed one.
$newVirtualDisk = New-IscsiVirtualDisk `
-ComputerName localhost `
-Path ramdisk:local$drive.vhdx `
-Size $size
Add-IscsiVirtualDiskTargetMapping `
-ComputerName localhost `
-TargetName ramdisks `
-Path ramdisk:local$drive.vhdx
Write-Output "Connecting to iSCSI"
# Point the local initiator at this machine and connect any targets that
# are not already connected.
New-IscsiTargetPortal -TargetPortalAddress $ip
Get-IscsiTarget | Where-Object {!$_.IsConnected} | Connect-IscsiTarget
Write-Output "Configuring disk"
# Locate the disk just created by matching its serial number, then bring
# it online, partition it at the requested drive letter, and format it.
$newDisk = Get-IscsiConnection |
Get-Disk |
Where-Object {$_.SerialNumber -eq $newVirtualDisk.SerialNumber}
Set-Disk -InputObject $newDisk -IsOffline $false
Initialize-Disk -InputObject $newDisk -PartitionStyle MBR
New-Partition -InputObject $newDisk -UseMaximumSize -DriveLetter $drive
Format-Volume -DriveLetter $drive -NewFileSystemLabel Temp -FileSystem NTFS

View file

@ -8,10 +8,28 @@ steps:
versionSpec: '$(python.version)'
architecture: '$(python.architecture)'
- task: PowerShell@2
inputs:
filePath: .azure-pipelines/scripts/New-RAMDisk.ps1
arguments: "-Drive R -Size 1GB"
displayName: Setup RAMDisk
- powershell: |
mkdir R:\Temp
$acl = Get-Acl "R:\Temp"
$rule = New-Object System.Security.AccessControl.FileSystemAccessRule(
"Everyone", "FullControl", "ContainerInherit,ObjectInherit", "None", "Allow"
)
$acl.AddAccessRule($rule)
Set-Acl "R:\Temp" $acl
displayName: Set RAMDisk Permissions
- bash: pip install --upgrade setuptools tox
displayName: Install Tox
- script: tox -e py -- -m unit -n 3 --junit-xml=junit/unit-test.xml
- script: tox -e py -- -m unit -n auto --junit-xml=junit/unit-test.xml
env:
TEMP: "R:\\Temp"
displayName: Tox run unit tests
- ${{ if eq(parameters.runIntegrationTests, 'true') }}:
@ -23,11 +41,9 @@ steps:
# Shorten paths to get under MAX_PATH or else integration tests will fail
# https://bugs.python.org/issue18199
subst T: $env:TEMP
$env:TEMP = "T:\"
$env:TMP = "T:\"
$env:TEMP = "R:\Temp"
tox -e py -- -m integration -n 3 --duration=5 --junit-xml=junit/integration-test.xml
tox -e py -- -m integration -n auto --duration=5 --junit-xml=junit/integration-test.xml
displayName: Tox run integration tests
- task: PublishTestResults@2

View file

@ -7,14 +7,14 @@ steps:
- bash: pip install --upgrade setuptools tox
displayName: Install Tox
- script: tox -e py -- -m unit --junit-xml=junit/unit-test.xml
- script: tox -e py -- -m unit -n auto --junit-xml=junit/unit-test.xml
displayName: Tox run unit tests
# Run integration tests in two groups so we will fail faster if there is a failure in the first group
- script: tox -e py -- -m integration -n 4 --duration=5 -k "not test_install" --junit-xml=junit/integration-test-group0.xml
- script: tox -e py -- -m integration -n auto --duration=5 -k "not test_install" --junit-xml=junit/integration-test-group0.xml
displayName: Tox run Group 0 integration tests
- script: tox -e py -- -m integration -n 4 --duration=5 -k "test_install" --junit-xml=junit/integration-test-group1.xml
- script: tox -e py -- -m integration -n auto --duration=5 -k "test_install" --junit-xml=junit/integration-test-group1.xml
displayName: Tox run Group 1 integration tests
- task: PublishTestResults@2

View file

@ -1,5 +1,5 @@
# Number of days of inactivity before issue is closed for lack of response
daysUntilClose: 30
daysUntilClose: 15
# Label requiring a response
responseRequiredLabel: "S: awaiting response"
# Comment to post when closing an Issue for lack of response. Set to `false` to disable

View file

@ -26,6 +26,12 @@ jobs:
uses: actions/setup-python@v1
with:
python-version: ${{ matrix.env.PYTHON_VERSION || 3.8 }}
- name: set PY
run: echo "::set-env name=PY::$(python -VV | sha256sum | cut -d' ' -f1)"
- uses: actions/cache@v1
with:
path: ~/.cache/pre-commit
key: pre-commit|${{ env.PY }}|${{ hashFiles('.pre-commit-config.yaml') }}
- name: Pre-configure global Git settings
run: >-
tools/travis/setup.sh

View file

@ -32,7 +32,7 @@ repos:
args: []
- id: mypy
name: mypy, for Py2
exclude: noxfile.py|docs|tests
exclude: noxfile.py|tools/automation/release|docs|tests
args: ["-2"]
- repo: https://github.com/pre-commit/pygrep-hooks

View file

@ -30,13 +30,13 @@ jobs:
# PyPy
- stage: secondary
env: GROUP=1
python: pypy3.5-6.0
python: pypy3.5-7.0.0
- env: GROUP=2
python: pypy3.5-6.0
python: pypy3.5-7.0.0
- env: GROUP=1
python: pypy2.7-6.0
python: pypy2.7-7.1.1
- env: GROUP=2
python: pypy2.7-6.0
python: pypy2.7-7.1.1
# Other Supported CPython
- env: GROUP=1
python: 3.7

View file

@ -7,6 +7,20 @@
.. towncrier release notes start
19.3.1 (2019-10-17)
===================
Features
--------
- Document Python 3.8 support. (`#7219 <https://github.com/pypa/pip/issues/7219>`_)
Bug Fixes
---------
- Fix bug that prevented installation of PEP 517 packages without ``setup.py``. (`#6606 <https://github.com/pypa/pip/issues/6606>`_)
19.3 (2019-10-14)
=================

View file

@ -1,13 +1,13 @@
Finding and choosing files (``index.py`` and ``PackageFinder``)
Finding and choosing files (``index`` and ``PackageFinder``)
---------------------------------------------------------------
The ``index.py`` module is a top-level module in pip responsible for deciding
The ``pip._internal.index`` sub-package in pip is responsible for deciding
what file to download and from where, given a requirement for a project. The
module's functionality is largely exposed through and coordinated by the
module's ``PackageFinder`` class.
package's functionality is largely exposed through and coordinated by the
package's ``PackageFinder`` class.
.. _index-py-overview:
.. _index-overview:
Overview
********
@ -15,17 +15,17 @@ Overview
Here is a rough description of the process that pip uses to choose what
file to download for a package, given a requirement:
1. Access the various network and file system locations configured for pip
that contain package files. These locations can include, for example,
pip's :ref:`--index-url <--index-url>` (with default
https://pypi.org/simple/ ) and any configured
:ref:`--extra-index-url <--extra-index-url>` locations.
Each of these locations is a `PEP 503`_ "simple repository" page, which
is an HTML page of anchor links.
2. Collect together all of the links (e.g. by parsing the anchor links
from the HTML pages) and create ``Link`` objects from each of these.
The :ref:`LinkCollector <link-collector-class>` class is responsible
for both this step and the previous.
1. Collect together the various network and file system locations containing
project package files. These locations are derived, for example, from pip's
:ref:`--index-url <install_--index-url>` (with default
https://pypi.org/simple/ ) setting and any configured
:ref:`--extra-index-url <install_--extra-index-url>` locations. Each of the
project page URL's is an HTML page of anchor links, as defined in
`PEP 503`_, the "Simple Repository API."
2. For each project page URL, fetch the HTML and parse out the anchor links,
creating a ``Link`` object from each one. The :ref:`LinkCollector
<link-collector-class>` class is responsible for both the previous step
and fetching the HTML over the network.
3. Determine which of the links are minimally relevant, using the
:ref:`LinkEvaluator <link-evaluator-class>` class. Create an
``InstallationCandidate`` object (aka candidate for install) for each
@ -38,7 +38,7 @@ file to download for a package, given a requirement:
<candidate-evaluator-class>` class).
The remainder of this section is organized by documenting some of the
classes inside ``index.py``, in the following order:
classes inside the ``index`` package, in the following order:
* the main :ref:`PackageFinder <package-finder-class>` class,
* the :ref:`LinkCollector <link-collector-class>` class,
@ -54,7 +54,7 @@ The ``PackageFinder`` class
***************************
The ``PackageFinder`` class is the primary way through which code in pip
interacts with ``index.py``. It is an umbrella class that encapsulates and
interacts with ``index`` package. It is an umbrella class that encapsulates and
groups together various package-finding functionality.
The ``PackageFinder`` class is responsible for searching the network and file
@ -89,7 +89,7 @@ case, the ``PackageFinder`` instance is created by the
``self_outdated_check.py`` module's ``pip_self_version_check()`` function.
The ``PackageFinder`` class is responsible for doing all of the things listed
in the :ref:`Overview <index-py-overview>` section like fetching and parsing
in the :ref:`Overview <index-overview>` section like fetching and parsing
`PEP 503`_ simple repository HTML pages, evaluating which links in the simple
repository pages are relevant for each requirement, and further filtering and
sorting by preference the candidates for install coming from the relevant
@ -105,12 +105,18 @@ One of ``PackageFinder``'s main top-level methods is
:ref:`LinkEvaluator <link-evaluator-class>` object to filter out some of
those links, and then returns a list of ``InstallationCandidates`` (aka
candidates for install). This corresponds to steps 1-3 of the
:ref:`Overview <index-py-overview>` above.
:ref:`Overview <index-overview>` above.
2. Constructs a ``CandidateEvaluator`` object and uses that to determine
the best candidate. It does this by calling the ``CandidateEvaluator``
class's ``compute_best_candidate()`` method on the return value of
``find_all_candidates()``. This corresponds to steps 4-5 of the Overview.
``PackageFinder`` also has a ``process_project_url()`` method (called by
``find_best_candidate()``) to process a `PEP 503`_ "simple repository"
project page. This method fetches and parses the HTML from a PEP 503 project
page URL, extracts the anchor elements and creates ``Link`` objects from
them, and then evaluates those links.
.. _link-collector-class:
@ -119,22 +125,22 @@ The ``LinkCollector`` class
The :ref:`LinkCollector <link-collector-class>` class is the class
responsible for collecting the raw list of "links" to package files
(represented as ``Link`` objects). An instance of the class accesses the
various `PEP 503`_ HTML "simple repository" pages, parses their HTML,
extracts the links from the anchor elements, and creates ``Link`` objects
from that information. The ``LinkCollector`` class is "unintelligent" in that
it doesn't do any evaluation of whether the links are relevant to the
original requirement; it just collects them.
(represented as ``Link`` objects) from file system locations, as well as the
`PEP 503`_ project page URL's that ``PackageFinder`` should access.
The ``LinkCollector`` class takes into account the user's :ref:`--find-links
<--find-links>`, :ref:`--extra-index-url <--extra-index-url>`, and related
options when deciding which locations to collect links from. The class's main
method is the ``collect_links()`` method. The :ref:`PackageFinder
<install_--find-links>`, :ref:`--extra-index-url <install_--extra-index-url>`,
and related options when deciding which locations to collect links from. The
class's main method is the ``collect_links()`` method. The :ref:`PackageFinder
<package-finder-class>` class invokes this method as the first step of its
``find_all_candidates()`` method.
The ``LinkCollector`` class is the only class in the ``index.py`` module that
makes network requests and is the only class in the module that depends
``LinkCollector`` also has a ``fetch_page()`` method to fetch the HTML from a
project page URL. This method is "unintelligent" in that it doesn't parse the
HTML.
The ``LinkCollector`` class is the only class in the ``index`` sub-package that
makes network requests and is the only class in the sub-package that depends
directly on ``PipSession``, which stores pip's configuration options and
state for making requests.
@ -182,12 +188,11 @@ user, and other user preferences, etc.
Specifically, the class has a ``get_applicable_candidates()`` method.
This accepts the ``InstallationCandidate`` objects resulting from the links
accepted by the ``LinkEvaluator`` class's ``evaluate_link()`` method, and
it further filters them to a list of "applicable" candidates.
accepted by the ``LinkEvaluator`` class's ``evaluate_link()`` method, filters
them to a list of "applicable" candidates and orders them by preference.
The ``CandidateEvaluator`` class also has a ``sort_best_candidate()`` method
that orders the applicable candidates by preference, and then returns the
best (i.e. most preferred).
that returns the best (i.e. most preferred) candidate.
Finally, the class has a ``compute_best_candidate()`` method that calls
``get_applicable_candidates()`` followed by ``sort_best_candidate()``, and

View file

@ -0,0 +1,179 @@
.. note::
This section of the documentation is currently being written. pip
developers welcome your help to complete this documentation. If you're
interested in helping out, please let us know in the `tracking issue`_.
.. _`tracking issue`: https://github.com/pypa/pip/issues/7279
**********************
Continuous Integration
**********************
Supported interpreters
======================
pip supports a variety of Python interpreters:
- CPython 2.7
- CPython 3.5
- CPython 3.6
- CPython 3.7
- CPython 3.8
- Latest PyPy
- Latest PyPy3
on different operating systems:
- Linux
- Windows
- MacOS
and on different architectures:
- x64
- x86
so 42 hypothetical interpreters.
Checks
======
``pip`` CI runs different kinds of tests:
- lint (defined in ``.pre-commit-config.yaml``)
- docs
- vendoring (is the ``src/_internal/_vendor`` directory cleanly vendored)
- unit tests (present in ``tests/unit``)
- "integration" tests (mostly present in ``tests/functional``)
- package (test the packaging steps)
Since lint, docs, vendoring and package tests only need to run on a pip
developer/contributor machine, they only need to be tested on the x64 variant
of the 3 different operating systems, and when an interpreter needs to be
specified it's ok to require the latest CPython interpreter.
So only unit tests and integration tests would need to be run with the different
interpreters.
Services
========
pip's test suite and checks are distributed across four different platforms that
provide free executors for open source packages:
- `Travis CI`_ (Used for Linux)
- `Appveyor CI`_ (Windows only)
- `Azure DevOps CI`_ (Linux, MacOS & Windows tests)
- `GitHub Actions`_ (Linux, MacOS & Windows tests)
.. _`Travis CI`: https://travis-ci.org/
.. _`Appveyor CI`: https://www.appveyor.com/
.. _`Azure DevOps CI`: https://dev.azure.com/
.. _`GitHub Actions`: https://github.com/features/actions
Current run tests
=================
Developer tasks
---------------
======== =============== ================ =========== ============
OS docs lint vendoring packages
======== =============== ================ =========== ============
Linux Travis, Github Travis, Github Travis Azure
Windows Azure
MacOS Azure
======== =============== ================ =========== ============
Actual testing
--------------
+------------------------------+---------------+-----------------+
| **interpreter** | **unit** | **integration** |
+-----------+----------+-------+---------------+-----------------+
| | | CP2.7 | Azure | Azure |
| | +-------+---------------+-----------------+
| | | CP3.5 | Azure | |
| | +-------+---------------+-----------------+
| | | CP3.6 | Azure | |
| | +-------+---------------+-----------------+
| | x86 | CP3.7 | Azure | |
| | +-------+---------------+-----------------+
| | | CP3.8 | | |
| | +-------+---------------+-----------------+
| | | PyPy | | |
| | +-------+---------------+-----------------+
| | | PyPy3 | | |
| Windows +----------+-------+---------------+-----------------+
| | | CP2.7 | Appveyor | Appveyor |
| | +-------+---------------+-----------------+
| | | CP3.5 | Azure | Azure |
| | +-------+---------------+-----------------+
| | | CP3.6 | Appveyor | Appveyor |
| | +-------+---------------+-----------------+
| | x64 | CP3.7 | Azure | Azure |
| | +-------+---------------+-----------------+
| | | CP3.8 | | |
| | +-------+---------------+-----------------+
| | | PyPy | | |
| | +-------+---------------+-----------------+
| | | PyPy3 | | |
+-----------+----------+-------+---------------+-----------------+
| | | CP2.7 | | |
| | +-------+---------------+-----------------+
| | | CP3.5 | | |
| | +-------+---------------+-----------------+
| | | CP3.6 | | |
| | +-------+---------------+-----------------+
| | x86 | CP3.7 | | |
| | +-------+---------------+-----------------+
| | | CP3.8 | | |
| | +-------+---------------+-----------------+
| | | PyPy | | |
| | +-------+---------------+-----------------+
| | | PyPy3 | | |
| Linux +----------+-------+---------------+-----------------+
| | | CP2.7 | Travis,Azure | Travis,Azure |
| | +-------+---------------+-----------------+
| | | CP3.5 | Travis,Azure | Travis,Azure |
| | +-------+---------------+-----------------+
| | | CP3.6 | Travis,Azure | Travis,Azure |
| | +-------+---------------+-----------------+
| | x64 | CP3.7 | Travis,Azure | Travis,Azure |
| | +-------+---------------+-----------------+
| | | CP3.8 | Travis | Travis |
| | +-------+---------------+-----------------+
| | | PyPy | Travis | Travis |
| | +-------+---------------+-----------------+
| | | PyPy3 | Travis | Travis |
+-----------+----------+-------+---------------+-----------------+
| | | CP2.7 | | |
| | +-------+---------------+-----------------+
| | | CP3.5 | | |
| | +-------+---------------+-----------------+
| | | CP3.6 | | |
| | +-------+---------------+-----------------+
| | x86 | CP3.7 | | |
| | +-------+---------------+-----------------+
| | | CP3.8 | | |
| | +-------+---------------+-----------------+
| | | PyPy | | |
| | +-------+---------------+-----------------+
| | | PyPy3 | | |
| MacOS +----------+-------+---------------+-----------------+
| | | CP2.7 | Azure | Azure |
| | +-------+---------------+-----------------+
| | | CP3.5 | Azure | Azure |
| | +-------+---------------+-----------------+
| | | CP3.6 | Azure | Azure |
| | +-------+---------------+-----------------+
| | x64 | CP3.7 | Azure | Azure |
| | +-------+---------------+-----------------+
| | | CP3.8 | | |
| | +-------+---------------+-----------------+
| | | PyPy | | |
| | +-------+---------------+-----------------+
| | | PyPy3 | | |
+-----------+----------+-------+---------------+-----------------+

View file

@ -247,7 +247,7 @@ and they will initiate a vote among the existing maintainers.
- CI Administration capabilities
- ReadTheDocs Administration capabilities
.. _`Studies have shown`: https://smartbear.com/smartbear/media/pdfs/wp-cc-11-best-practices-of-peer-code-review.pdf
.. _`Studies have shown`: https://www.kessler.de/prd/smartbear/BestPracticesForPeerCodeReview.pdf
.. _`resolve merge conflicts`: https://help.github.com/articles/resolving-a-merge-conflict-using-the-command-line/
.. _`Travis CI`: https://travis-ci.org/
.. _`Appveyor CI`: https://www.appveyor.com/

View file

@ -8,6 +8,18 @@ This document is meant to get you setup to work on pip and to act as a guide and
reference to the development setup. If you face any issues during this
process, please `open an issue`_ about it on the issue tracker.
Get the source code
-------------------
To work on pip, you first need to get the source code of pip. The source code is
available on `GitHub`_.
.. code-block:: console
$ git clone https://github.com/pypa/pip
$ cd pip
Development Environment
-----------------------
@ -40,6 +52,13 @@ To run tests locally, run:
$ tox -e py36
Generally, it can take a long time to run pip's test suite. To run tests in parallel,
which is faster, run:
.. code-block:: console
$ tox -e py36 -- -n auto
The example above runs tests against Python 3.6. You can also use other
versions like ``py27`` and ``pypy3``.
@ -96,3 +115,4 @@ The built documentation can be found in the ``docs/build`` folder.
.. _`install Python`: https://realpython.com/installing-python/
.. _`PEP 484 type-comments`: https://www.python.org/dev/peps/pep-0484/#suggested-syntax-for-python-2-7-and-straddling-code
.. _`rich CLI`: https://docs.pytest.org/en/latest/usage.html#specifying-tests-selecting-tests
.. _`GitHub`: https://github.com/pypa/pip

View file

@ -14,6 +14,7 @@ or the `pypa-dev mailing list`_, to ask questions or get involved.
getting-started
contributing
ci
issue-triage
architecture/index
release-process

View file

@ -80,20 +80,13 @@ Creating a new release
----------------------
#. Checkout the current pip ``master`` branch.
#. Ensure you have the latest ``wheel``, ``setuptools``, ``twine`` and ``nox`` packages installed.
#. Generate a new ``AUTHORS.txt`` (``nox -s generate_authors``) and commit the
results.
#. Bump the version in ``pip/__init__.py`` to the release version and commit
the results. Usually this involves dropping just the ``.devN`` suffix on the
version.
#. Generate a new ``NEWS.rst`` (``nox -s generate_news``) and commit the
results.
#. Create a tag at the current commit, of the form ``YY.N``
(``git tag YY.N``).
#. Checkout the tag (``git checkout YY.N``).
#. Create the distribution files (``python setup.py sdist bdist_wheel``).
#. Upload the distribution files to PyPI using twine
(``twine upload dist/*``).
#. Ensure you have the latest ``nox`` installed.
#. Prepare for release using ``nox -s prepare-release -- YY.N``.
This will update the relevant files and tag the correct commit.
#. Build the release artifacts using ``nox -s build-release -- YY.N``.
This will checkout the tag, generate the distribution files to be
uploaded and checkout the master branch again.
#. Upload the release to PyPI using ``nox -s upload-release -- YY.N``.
#. Push all of the changes including the tag.
#. Regenerate the ``get-pip.py`` script in the `get-pip repository`_ (as
documented there) and commit the results.
@ -104,7 +97,20 @@ Creating a new release
.. note::
Steps 3 to 6 are automated in ``nox -s release -- YY.N`` command.
If the release dropped the support of an obsolete Python version ``M.m``,
a new ``M.m/get-pip.py`` needs to be published: update the ``all`` task from
``tasks/generate.py`` in `get-pip repository`_ and make a pull request to
`psf-salt repository`_ to add the new ``get-pip.py`` (and its directory) to
``salt/pypa/bootstrap/init.sls``.
.. note::
If the ``get-pip.py`` script needs to be updated due to changes in pip internals
and if the last ``M.m/get-pip.py`` published still uses the default template, make
sure to first duplicate ``templates/default.py`` as ``templates/pre-YY.N.py``
before updating it and specify in ``tasks/generate.py`` that ``M.m/get-pip.py``
now needs to use ``templates/pre-YY.N.py``.
Creating a bug-fix release
@ -117,12 +123,16 @@ order to create one of these the changes should already be merged into the
#. Create a new ``release/YY.N.Z+1`` branch off of the ``YY.N`` tag using the
command ``git checkout -b release/YY.N.Z+1 YY.N``.
#. Cherry pick the fixed commits off of the ``master`` branch, fixing any
conflicts and moving any changelog entries from the development version's
changelog section to the ``YY.N.Z+1`` section.
conflicts.
#. Run ``nox -s prepare-release -- YY.N.Z+1``.
#. Merge master into your release branch and drop the news files that have been
included in your release (otherwise they would also appear in the ``YY.N+1``
changelog)
#. Push the ``release/YY.N.Z+1`` branch to github and submit a PR for it against
the ``master`` branch and wait for the tests to run.
#. Once tests run, merge the ``release/YY.N.Z+1`` branch into master, and follow
the above release process starting with step 4.
.. _`get-pip repository`: https://github.com/pypa/get-pip
.. _`psf-salt repository`: https://github.com/python/psf-salt
.. _`CPython`: https://github.com/pypa/cpython

View file

@ -18,12 +18,14 @@ Just make sure to :ref:`upgrade pip <Upgrading pip>`.
Installing with get-pip.py
--------------------------
To install pip, securely download `get-pip.py
<https://bootstrap.pypa.io/get-pip.py>`_. [1]_::
To install pip, securely [1]_ download ``get-pip.py`` by following
this link: `get-pip.py
<https://bootstrap.pypa.io/get-pip.py>`_. Alternatively, use ``curl``::
curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py
Then run the following::
Then run the following command in the folder where you
have downloaded ``get-pip.py``::
python get-pip.py
@ -109,7 +111,7 @@ On Windows [4]_::
Python and OS Compatibility
---------------------------
pip works with CPython versions 2.7, 3.5, 3.6, 3.7 and also PyPy.
pip works with CPython versions 2.7, 3.5, 3.6, 3.7, 3.8 and also PyPy.
This means pip works on the latest patch version of each of these minor
versions. Previous patch versions are supported on a best effort approach.

View file

@ -47,7 +47,7 @@ Options
.. pip-command-options:: download
.. pip-index-options::
.. pip-index-options:: download
Examples

View file

@ -148,19 +148,26 @@ and the newline following it is effectively ignored.
Comments are stripped *before* line continuations are processed.
To interpret the requirements file in UTF-8 format add a comment
``# -*- coding: utf-8 -*-`` to the first or second line of the file.
The following options are supported:
* :ref:`-i, --index-url <--index-url>`
* :ref:`--extra-index-url <--extra-index-url>`
* :ref:`--no-index <--no-index>`
* :ref:`-f, --find-links <--find-links>`
* :ref:`-i, --index-url <install_--index-url>`
* :ref:`--extra-index-url <install_--extra-index-url>`
* :ref:`--no-index <install_--no-index>`
* :ref:`-c, --constraint <install_--constraint>`
* :ref:`-r, --requirement <install_--requirement>`
* :ref:`-e, --editable <install_--editable>`
* :ref:`-f, --find-links <install_--find-links>`
* :ref:`--no-binary <install_--no-binary>`
* :ref:`--only-binary <install_--only-binary>`
* :ref:`--require-hashes <--require-hashes>`
* :ref:`--require-hashes <install_--require-hashes>`
* :ref:`--pre <install_--pre>`
* :ref:`--trusted-host <--trusted-host>`
For example, to specify :ref:`--no-index <--no-index>` and two
:ref:`--find-links <--find-links>` locations:
For example, to specify :ref:`--no-index <install_--no-index>` and two
:ref:`--find-links <install_--find-links>` locations:
::
@ -244,7 +251,8 @@ pip supports installing from a package index using a :term:`requirement
specifier <pypug:Requirement Specifier>`. Generally speaking, a requirement
specifier is composed of a project name followed by optional :term:`version
specifiers <pypug:Version Specifier>`. :pep:`508` contains a full specification
of the format of a requirement.
of the format of a requirement. Since version 18.1 pip supports the
``url_req``-form specification.
Some examples:
@ -384,7 +392,7 @@ So if your repository layout is:
- some_file
- some_other_file
You'll need to use ``pip install -e vcs+protocol://repo_url/#egg=pkg&subdirectory=pkg_dir``.
You'll need to use ``pip install -e "vcs+protocol://repo_url/#egg=pkg&subdirectory=pkg_dir"``.
Git
@ -570,6 +578,9 @@ and use any packages found there. This is disabled via the same
of that is not part of the pip API. As of 7.0, pip makes a subdirectory for
each sdist that wheels are built from and places the resulting wheels inside.
As of version 20.0, pip also caches wheels when building from an immutable Git
reference (i.e. a commit hash).
Pip attempts to choose the best wheels from those built in preference to
building a new wheel. Note that this means when a package has both optional
C extensions and builds ``py`` tagged wheels when the C extension can't be built
@ -818,7 +829,7 @@ Options
.. pip-command-options:: install
.. pip-index-options::
.. pip-index-options:: install
.. _`pip install Examples`:

View file

@ -20,7 +20,7 @@ Options
.. pip-command-options:: list
.. pip-index-options::
.. pip-index-options:: list
Examples

View file

@ -59,7 +59,7 @@ Options
.. pip-command-options:: wheel
.. pip-index-options::
.. pip-index-options:: wheel
Examples

View file

@ -49,6 +49,35 @@ For more information and examples, see the :ref:`pip install` reference.
.. _PyPI: https://pypi.org/
Basic Authentication Credentials
********************************
pip supports basic authentication credentials. Basically, in the URL there is
a username and password separated by ``:``.
``https://[username[:password]@]pypi.company.com/simple``
Certain special characters are not valid in the authentication part of URLs.
If the user or password part of your login credentials contain any of the
special characters
`here <https://en.wikipedia.org/wiki/Percent-encoding#Percent-encoding_reserved_characters>`_
then they must be percent-encoded. For example, for a
user with username "user" and password "he//o" accessing a repository at
pypi.company.com, the index URL with credentials would look like:
``https://user:he%2F%2Fo@pypi.company.com``
Support for percent-encoded authentication in index URLs was added in pip 10.0.0
(in `#3236 <https://github.com/pypa/pip/issues/3236>`_). Users that must use authentication
for their Python repository on systems with older pip versions should make the latest
get-pip.py available in their environment to bootstrap pip to a recent-enough version.
For indexes that only require single-part authentication tokens, provide the token
as the "username" and do not provide a password, for example -
``https://0123456789abcdef@pypi.company.com``
Using a Proxy Server
********************
@ -394,8 +423,8 @@ set like this:
ignore-installed = true
no-dependencies = yes
To enable the boolean options ``--no-compile`` and ``--no-cache-dir``, falsy
values have to be used:
To enable the boolean options ``--no-compile``, ``--no-warn-script-location``
and ``--no-cache-dir``, falsy values have to be used:
.. code-block:: ini
@ -404,6 +433,7 @@ values have to be used:
[install]
no-compile = no
no-warn-script-location = false
Appending options like ``--find-links`` can be written on multiple lines:
@ -514,7 +544,7 @@ $ pip wheel --wheel-dir DIR -r requirements.txt
Then, to install from local only, you'll be using :ref:`--find-links
<--find-links>` and :ref:`--no-index <--no-index>` like so::
<install_--find-links>` and :ref:`--no-index <install_--no-index>` like so::
$ pip install --no-index --find-links=DIR -r requirements.txt

View file

@ -86,9 +86,13 @@ class PipGeneralOptions(PipOptions):
class PipIndexOptions(PipOptions):
required_arguments = 1
def process_options(self):
cmd_name = self.arguments[0]
self._format_options(
[o() for o in cmdoptions.index_group['options']]
[o() for o in cmdoptions.index_group['options']],
cmd_name=cmd_name,
)

2
news/1668.feature Normal file
View file

@ -0,0 +1,2 @@
Default to doing a user install (as if ``--user`` was passed) when the main
site-packages directory is not writeable and user site-packages are enabled.

1
news/5702.bugfix Normal file
View file

@ -0,0 +1 @@
Correctly handle system site-packages, in virtual environments created with venv (PEP 405).

1
news/5716.bugfix Normal file
View file

@ -0,0 +1 @@
Fix case sensitive comparison of pip freeze when used with -r option.

1
news/5860.trivial Normal file
View file

@ -0,0 +1 @@
Updated info about pip support for url_req portion of PEP508 in doc.

1
news/6004.trivial Normal file
View file

@ -0,0 +1 @@
Read version in setup.py without re

1
news/6340.feature Normal file
View file

@ -0,0 +1 @@
Add a new option ``--save-wheel-names <filename>`` to ``pip wheel`` that writes the names of the resulting wheels to the given filename.

2
news/6410.bugfix Normal file
View file

@ -0,0 +1,2 @@
Enforce PEP 508 requirement format in ``pyproject.toml``
``build-system.requires``.

1
news/6414.feature Normal file
View file

@ -0,0 +1 @@
Warn if a path in PATH starts with tilde during ``pip install``.

1
news/6599.bugfix Normal file
View file

@ -0,0 +1 @@
Fix building packages which specify ``backend-path`` in pyproject.toml.

2
news/6640.feature Normal file
View file

@ -0,0 +1,2 @@
Cache wheels built from Git requirements that are considered immutable,
because they point to a commit hash.

2
news/6783.bugfix Normal file
View file

@ -0,0 +1,2 @@
Fix passwords being visible in the index-url in
"Downloading <url>" message.

5
news/6852.feature Normal file
View file

@ -0,0 +1,5 @@
Cache wheels that ``pip wheel`` built locally, matching what
``pip install`` does. This particularly helps performance in workflows where
``pip wheel`` is used for `building before installing
<https://pip.pypa.io/en/stable/user_guide/#installing-from-local-packages>`_.
Users desiring the original behavior can use ``pip wheel --no-cache-dir``.

1
news/6998.removal Normal file
View file

@ -0,0 +1 @@
Deprecate setup.py-based builds that do not generate an ``.egg-info`` directory.

1
news/7146.feature Normal file
View file

@ -0,0 +1 @@
Display CA information in ``pip debug``.

1
news/7155.bugfix Normal file
View file

@ -0,0 +1 @@
Correctly handle system site-packages, in virtual environments created with venv (PEP 405).

0
news/7178.trivial Normal file
View file

1
news/7182.doc Normal file
View file

@ -0,0 +1 @@
Document that "coding: utf-8" is supported in requirements.txt

1
news/7191.bugfix Normal file
View file

@ -0,0 +1 @@
Change method from shutil.remove to shutil.rmtree in noxfile.py.

1
news/7193.bugfix Normal file
View file

@ -0,0 +1 @@
Skip running tests which require subversion, when svn isn't installed

1
news/7197.doc Normal file
View file

@ -0,0 +1 @@
Explain how to get pip's source code in `Getting Started <https://pip.pypa.io/en/stable/development/getting-started/>`_

1
news/7199.trivial Normal file
View file

@ -0,0 +1 @@
adding line in trivial file to avoid linter issues.

1
news/7201.doc Normal file
View file

@ -0,0 +1 @@
Describe how basic authentication credentials in URLs work.

1
news/7207.bugfix Normal file
View file

@ -0,0 +1 @@
Fix not sending client certificates when using ``--trusted-host``.

1
news/7222.doc Normal file
View file

@ -0,0 +1 @@
Add more clear installation instructions

1
news/7225.feature Normal file
View file

@ -0,0 +1 @@
Show only the filename (instead of full URL), when downloading from PyPI.

1
news/7230.trivial Normal file
View file

@ -0,0 +1 @@
Change ``pip._internal.wheel`` to respect docstring conventions.

1
news/7268.trivial Normal file
View file

@ -0,0 +1 @@
refactoring: remove should_use_ephemeral_cache

1
news/7281.trivial Normal file
View file

@ -0,0 +1 @@
refactor _get_used_vcs_backend

1
news/7297.removal Normal file
View file

@ -0,0 +1 @@
Deprecate undocumented ``--skip-requirements-regex`` option.

2
news/7327.removal Normal file
View file

@ -0,0 +1,2 @@
Use literal "abi3" for wheel tag on CPython 3.x, to align with PEP 384
which only defines it for this platform.

1
news/7333.bugfix Normal file
View file

@ -0,0 +1 @@
Include ``subdirectory`` URL fragments in cache keys.

0
news/7334.trivial Normal file
View file

2
news/7340.bugfix Normal file
View file

@ -0,0 +1,2 @@
Fix typo in warning message when any of ``--build-option``, ``--global-option``
and ``--install-option`` is used in requirements.txt

1
news/7347.doc Normal file
View file

@ -0,0 +1 @@
Fix documentation links for index options

4
news/7355.removal Normal file
View file

@ -0,0 +1,4 @@
Remove interpreter-specific major version tag e.g. ``cp3-none-any``
from consideration. This behavior was not documented strictly, and this
tag in particular is `not useful <https://snarky.ca/the-challenges-in-designing-a-library-for-pep-425/>`_.
Anyone with a use case can create an issue with pypa/packaging.

0
news/7359.trivial Normal file
View file

1
news/7385.doc Normal file
View file

@ -0,0 +1 @@
Better document the requirements file format

1
news/7393.bugfix Normal file
View file

@ -0,0 +1 @@
Fix the logging of cached HTTP response shown as downloading.

View file

View file

@ -4,13 +4,17 @@
# The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False
import io
import glob
import os
import shutil
import subprocess
import sys
import nox
sys.path.append(".")
from tools.automation import release # isort:skip # noqa
sys.path.pop()
nox.options.reuse_existing_virtualenvs = True
nox.options.sessions = ["lint"]
@ -27,29 +31,6 @@ AUTHORS_FILE = "AUTHORS.txt"
VERSION_FILE = "src/pip/__init__.py"
def get_author_list():
"""Get the list of authors from Git commits.
"""
# subprocess because session.run doesn't give us stdout
result = subprocess.run(
["git", "log", "--use-mailmap", "--format=%aN <%aE>"],
capture_output=True,
encoding="utf-8",
)
# Create a unique list.
authors = []
seen_authors = set()
for author in result.stdout.splitlines():
author = author.strip()
if author.lower() not in seen_authors:
seen_authors.add(author.lower())
authors.append(author)
# Sort our list of Authors by their case insensitive name
return sorted(authors, key=lambda x: x.lower())
def run_with_protected_pip(session, *arguments):
"""Do a session.run("pip", *arguments), using a "protected" pip.
@ -76,23 +57,18 @@ def should_update_common_wheels():
# Clear the stale cache.
if need_to_repopulate:
shutil.remove(LOCATIONS["common-wheels"], ignore_errors=True)
shutil.rmtree(LOCATIONS["common-wheels"], ignore_errors=True)
return need_to_repopulate
def update_version_file(new_version):
with open(VERSION_FILE, "w", encoding="utf-8") as f:
f.write('__version__ = "{}"\n'.format(new_version))
# -----------------------------------------------------------------------------
# Development Commands
# These are currently prototypes to evaluate whether we want to switch over
# completely to nox for all our automation. Contributors should prefer using
# `tox -e ...` until this note is removed.
# -----------------------------------------------------------------------------
@nox.session(python=["2.7", "3.5", "3.6", "3.7", "pypy"])
@nox.session(python=["2.7", "3.5", "3.6", "3.7", "3.8", "pypy", "pypy3"])
def test(session):
# Get the common wheels.
if should_update_common_wheels():
@ -111,6 +87,8 @@ def test(session):
# Build source distribution
sdist_dir = os.path.join(session.virtualenv.location, "sdist")
if os.path.exists(sdist_dir):
shutil.rmtree(sdist_dir, ignore_errors=True)
session.run(
"python", "setup.py", "sdist",
"--formats=zip", "--dist-dir", sdist_dir,
@ -174,70 +152,95 @@ def lint(session):
# -----------------------------------------------------------------------------
# Release Commands
# -----------------------------------------------------------------------------
@nox.session(python=False)
def generate_authors(session):
# Get our list of authors
session.log("Collecting author names")
authors = get_author_list()
@nox.session(name="prepare-release")
def prepare_release(session):
version = release.get_version_from_arguments(session.posargs)
if not version:
session.error("Usage: nox -s prepare-release -- YY.N[.P]")
# Write our authors to the AUTHORS file
session.log("Writing AUTHORS")
with io.open(AUTHORS_FILE, "w", encoding="utf-8") as fp:
fp.write(u"\n".join(authors))
fp.write(u"\n")
session.log("# Ensure nothing is staged")
if release.modified_files_in_git("--staged"):
session.error("There are files staged in git")
@nox.session
def generate_news(session):
session.log("Generating NEWS")
session.install("towncrier")
# You can pass 2 possible arguments: --draft, --yes
session.run("towncrier", *session.posargs)
@nox.session
def release(session):
assert len(session.posargs) == 1, "A version number is expected"
new_version = session.posargs[0]
parts = new_version.split('.')
# Expect YY.N or YY.N.P
assert 2 <= len(parts) <= 3, parts
# Only integers
parts = list(map(int, parts))
session.log("Generating commits for version {}".format(new_version))
session.log("Checking that nothing is staged")
# Non-zero exit code means that something is already staged
session.run("git", "diff", "--staged", "--exit-code", external=True)
session.log(f"Updating {AUTHORS_FILE}")
generate_authors(session)
if subprocess.run(["git", "diff", "--exit-code"]).returncode:
session.run("git", "add", AUTHORS_FILE, external=True)
session.run(
"git", "commit", "-m", f"Updating {AUTHORS_FILE}",
external=True,
session.log(f"# Updating {AUTHORS_FILE}")
release.generate_authors(AUTHORS_FILE)
if release.modified_files_in_git():
release.commit_file(
session, AUTHORS_FILE, message=f"Update {AUTHORS_FILE}",
)
else:
session.log(f"No update needed for {AUTHORS_FILE}")
session.log(f"# No changes to {AUTHORS_FILE}")
session.log("Generating NEWS")
session.install("towncrier")
session.run("towncrier", "--yes", "--version", new_version)
session.log("# Generating NEWS")
release.generate_news(session, version)
session.log("Updating version")
update_version_file(new_version)
session.run("git", "add", VERSION_FILE, external=True)
session.run("git", "commit", "-m", f"Release {new_version}", external=True)
session.log(f"# Bumping for release {version}")
release.update_version_file(version, VERSION_FILE)
release.commit_file(session, VERSION_FILE, message="Bump for release")
session.log("Tagging release")
session.run(
"git", "tag", "-m", f"Release {new_version}", new_version,
external=True,
)
session.log("# Tagging release")
release.create_git_tag(session, version, message=f"Release {version}")
next_dev_version = f"{parts[0]}.{parts[1] + 1}.dev0"
update_version_file(next_dev_version)
session.run("git", "add", VERSION_FILE, external=True)
session.run("git", "commit", "-m", "Back to development", external=True)
session.log("# Bumping for development")
next_dev_version = release.get_next_development_version(version)
release.update_version_file(next_dev_version, VERSION_FILE)
release.commit_file(session, VERSION_FILE, message="Bump for development")
@nox.session(name="build-release")
def build_release(session):
version = release.get_version_from_arguments(session.posargs)
if not version:
session.error("Usage: nox -s build-release -- YY.N[.P]")
session.log("# Ensure no files in dist/")
if release.have_files_in_folder("dist"):
session.error("There are files in dist/. Remove them and try again")
session.log("# Install dependencies")
session.install("setuptools", "wheel", "twine")
session.log("# Checkout the tag")
session.run("git", "checkout", version, external=True, silent=True)
session.log("# Build distributions")
session.run("python", "setup.py", "sdist", "bdist_wheel", silent=True)
session.log("# Verify distributions")
session.run("twine", "check", *glob.glob("dist/*"), silent=True)
session.log("# Checkout the master branch")
session.run("git", "checkout", "master", external=True, silent=True)
@nox.session(name="upload-release")
def upload_release(session):
version = release.get_version_from_arguments(session.posargs)
if not version:
session.error("Usage: nox -s upload-release -- YY.N[.P]")
session.log("# Install dependencies")
session.install("twine")
distribution_files = glob.glob("dist/*")
session.log(f"# Distribution files: {distribution_files}")
# Sanity check: Make sure there's 2 distribution files.
count = len(distribution_files)
if count != 2:
session.error(
f"Expected 2 distribution files for upload, got {count}. "
f"Remove dist/ and run 'nox -s build-release -- {version}'"
)
# Sanity check: Make sure the files are correctly named.
expected_distribution_files = [
f"pip-{version}-py2.py3-none-any.whl",
f"pip-{version}.tar.gz",
]
if sorted(distribution_files) != sorted(expected_distribution_files):
session.error(
f"Distribution files do not seem to be for {version} release."
)
session.log("# Upload distributions")
session.run("twine", "upload", *distribution_files)

View file

@ -30,6 +30,7 @@ ignore = W504
follow_imports = silent
ignore_missing_imports = True
disallow_untyped_defs = True
disallow_any_generics = True
[mypy-pip/_vendor/*]
follow_imports = skip

View file

@ -3,39 +3,34 @@
import codecs
import os
import re
import sys
from setuptools import find_packages, setup
here = os.path.abspath(os.path.dirname(__file__))
def read(*parts):
def read(rel_path):
here = os.path.abspath(os.path.dirname(__file__))
# intentionally *not* adding an encoding option to open, See:
# https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690
with codecs.open(os.path.join(here, *parts), 'r') as fp:
with codecs.open(os.path.join(here, rel_path), 'r') as fp:
return fp.read()
def find_version(*file_paths):
version_file = read(*file_paths)
version_match = re.search(
r"^__version__ = ['\"]([^'\"]*)['\"]",
version_file,
re.M,
)
if version_match:
return version_match.group(1)
raise RuntimeError("Unable to find version string.")
def get_version(rel_path):
for line in read(rel_path).splitlines():
if line.startswith('__version__'):
# __version__ = "0.9"
delim = '"' if '"' in line else "'"
return line.split(delim)[1]
else:
raise RuntimeError("Unable to find version string.")
long_description = read('README.rst')
setup(
name="pip",
version=find_version("src", "pip", "__init__.py"),
version=get_version("src/pip/__init__.py"),
description="The PyPA recommended tool for installing Python packages.",
long_description=long_description,
@ -52,6 +47,7 @@ setup(
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
],
@ -75,8 +71,8 @@ setup(
entry_points={
"console_scripts": [
"pip=pip._internal.main:main",
"pip%s=pip._internal.main:main" % sys.version_info[:1],
"pip%s.%s=pip._internal.main:main" % sys.version_info[:2],
"pip{}=pip._internal.main:main".format(sys.version_info[0]),
"pip{}.{}=pip._internal.main:main".format(*sys.version_info[:2]),
],
},

View file

@ -1 +1 @@
__version__ = "19.4.dev0"
__version__ = "20.0.dev0"

View file

@ -23,7 +23,7 @@ from pip._internal.utils.ui import open_spinner
if MYPY_CHECK_RUNNING:
from typing import Tuple, Set, Iterable, Optional, List
from pip._internal.index import PackageFinder
from pip._internal.index.package_finder import PackageFinder
logger = logging.getLogger(__name__)

View file

@ -20,7 +20,7 @@ from pip._internal.wheel import InvalidWheelFilename, Wheel
if MYPY_CHECK_RUNNING:
from typing import Optional, Set, List, Any
from pip._internal.index import FormatControl
from pip._internal.models.format_control import FormatControl
from pip._internal.pep425tags import Pep425Tag
logger = logging.getLogger(__name__)
@ -58,6 +58,10 @@ class Cache(object):
key_parts = [link.url_without_fragment]
if link.hash_name is not None and link.hash is not None:
key_parts.append("=".join([link.hash_name, link.hash]))
if link.subdirectory_fragment:
key_parts.append(
"=".join(["subdirectory", link.subdirectory_fragment])
)
key_url = "#".join(key_parts)
# Encode our key url with sha224, we'll use this because it has similar
@ -73,19 +77,18 @@ class Cache(object):
return parts
def _get_candidates(self, link, package_name):
def _get_candidates(self, link, canonical_package_name):
# type: (Link, Optional[str]) -> List[Any]
can_not_cache = (
not self.cache_dir or
not package_name or
not canonical_package_name or
not link
)
if can_not_cache:
return []
canonical_name = canonicalize_name(package_name)
formats = self.format_control.get_allowed_formats(
canonical_name
canonical_package_name
)
if not self.allowed_formats.intersection(formats):
return []
@ -168,11 +171,23 @@ class SimpleWheelCache(Cache):
# type: (...) -> Link
candidates = []
for wheel_name in self._get_candidates(link, package_name):
if not package_name:
return link
canonical_package_name = canonicalize_name(package_name)
for wheel_name in self._get_candidates(link, canonical_package_name):
try:
wheel = Wheel(wheel_name)
except InvalidWheelFilename:
continue
if canonicalize_name(wheel.name) != canonical_package_name:
logger.debug(
"Ignoring cached wheel {} for {} as it "
"does not match the expected distribution name {}.".format(
wheel_name, link, package_name
)
)
continue
if not wheel.supported(supported_tags):
# Built for a different python/arch/etc
continue

View file

@ -92,7 +92,7 @@ class Command(CommandContextMixIn):
raise NotImplementedError
def parse_args(self, args):
# type: (List[str]) -> Tuple
# type: (List[str]) -> Tuple[Any, Any]
# factored out for testability
return self.parser.parse_args(args)
@ -131,6 +131,17 @@ class Command(CommandContextMixIn):
) + message
deprecated(message, replacement=None, gone_in=None)
if options.skip_requirements_regex:
deprecated(
"--skip-requirements-regex is unsupported and will be removed",
replacement=(
"manage requirements/constraints files explicitly, "
"possibly generating them from metadata"
),
gone_in="20.1",
issue=7297,
)
# TODO: Try to get these passing down from the command?
# without resorting to os.environ to hold these.
# This also affects isolated builds and it should.

View file

@ -83,8 +83,8 @@ def check_install_build_global(options, check_options=None):
control = options.format_control
control.disallow_binaries()
warnings.warn(
'Disabling all use of wheels due to the use of --build-options '
'/ --global-options / --install-options.', stacklevel=2,
'Disabling all use of wheels due to the use of --build-option '
'/ --global-option / --install-option.', stacklevel=2,
)

View file

@ -8,13 +8,14 @@ PackageFinder machinery and all its vendored dependencies, etc.
# The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False
import logging
import os
from functools import partial
from pip._internal.cli.base_command import Command
from pip._internal.cli.command_context import CommandContextMixIn
from pip._internal.exceptions import CommandError
from pip._internal.index import PackageFinder
from pip._internal.index.package_finder import PackageFinder
from pip._internal.legacy_resolve import Resolver
from pip._internal.models.selection_prefs import SelectionPreferences
from pip._internal.network.session import PipSession
@ -41,6 +42,8 @@ if MYPY_CHECK_RUNNING:
from pip._internal.req.req_tracker import RequirementTracker
from pip._internal.utils.temp_dir import TempDirectory
logger = logging.getLogger(__name__)
class SessionCommandMixin(CommandContextMixIn):
@ -149,6 +152,9 @@ class RequirementCommand(IndexGroupCommand):
temp_build_dir, # type: TempDirectory
options, # type: Values
req_tracker, # type: RequirementTracker
session, # type: PipSession
finder, # type: PackageFinder
use_user_site, # type: bool
download_dir=None, # type: str
wheel_download_dir=None, # type: str
):
@ -166,12 +172,15 @@ class RequirementCommand(IndexGroupCommand):
progress_bar=options.progress_bar,
build_isolation=options.build_isolation,
req_tracker=req_tracker,
session=session,
finder=finder,
require_hashes=options.require_hashes,
use_user_site=use_user_site,
)
@staticmethod
def make_resolver(
preparer, # type: RequirementPreparer
session, # type: PipSession
finder, # type: PackageFinder
options, # type: Values
wheel_cache=None, # type: Optional[WheelCache]
@ -195,7 +204,6 @@ class RequirementCommand(IndexGroupCommand):
)
return Resolver(
preparer=preparer,
session=session,
finder=finder,
make_install_req=make_install_req,
use_user_site=use_user_site,
@ -204,7 +212,7 @@ class RequirementCommand(IndexGroupCommand):
ignore_requires_python=ignore_requires_python,
force_reinstall=force_reinstall,
upgrade_strategy=upgrade_strategy,
py_version_info=py_version_info
py_version_info=py_version_info,
)
def populate_requirement_set(
@ -220,9 +228,6 @@ class RequirementCommand(IndexGroupCommand):
"""
Marshal cmd line args into a requirement set.
"""
# NOTE: As a side-effect, options.require_hashes and
# requirement_set.require_hashes may be updated
for filename in options.constraints:
for req_to_add in parse_requirements(
filename,
@ -250,6 +255,7 @@ class RequirementCommand(IndexGroupCommand):
req_to_add.is_direct = True
requirement_set.add_requirement(req_to_add)
# NOTE: options.require_hashes may be set if --require-hashes is True
for filename in options.requirements:
for req_to_add in parse_requirements(
filename,
@ -258,9 +264,14 @@ class RequirementCommand(IndexGroupCommand):
use_pep517=options.use_pep517):
req_to_add.is_direct = True
requirement_set.add_requirement(req_to_add)
# If --require-hashes was a line in a requirements file, tell
# RequirementSet about it:
requirement_set.require_hashes = options.require_hashes
# If any requirement has hash options, enable hash checking.
requirements = (
requirement_set.unnamed_requirements +
list(requirement_set.requirements.values())
)
if any(req.has_hash_options for req in requirements):
options.require_hashes = True
if not (args or options.editables or options.requirements):
opts = {'name': self.name}
@ -274,6 +285,18 @@ class RequirementCommand(IndexGroupCommand):
'You must give at least one requirement to %(name)s '
'(see "pip help %(name)s")' % opts)
@staticmethod
def trace_basic_info(finder):
# type: (PackageFinder) -> None
"""
Trace basic information about the provided objects.
"""
# Display where finder is looking for packages
search_scope = finder.search_scope
locations = search_scope.get_formatted_locations()
if locations:
logger.info(locations)
def _build_package_finder(
self,
options, # type: Values

View file

@ -5,8 +5,11 @@ from __future__ import absolute_import
import locale
import logging
import os
import sys
from pip._vendor.certifi import where
from pip._internal.cli import cmdoptions
from pip._internal.cli.base_command import Command
from pip._internal.cli.cmdoptions import make_target_python
@ -17,14 +20,14 @@ from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.wheel import format_tag
if MYPY_CHECK_RUNNING:
from typing import Any, List
from typing import Any, List, Optional
from optparse import Values
logger = logging.getLogger(__name__)
def show_value(name, value):
# type: (str, str) -> None
# type: (str, Optional[str]) -> None
logger.info('{}: {}'.format(name, value))
@ -75,6 +78,25 @@ def show_tags(options):
logger.info(msg)
def ca_bundle_info(config):
levels = set()
for key, value in config.items():
levels.add(key.split('.')[0])
if not levels:
return "Not specified"
levels_that_override_global = ['install', 'wheel', 'download']
global_overriding_level = [
level for level in levels if level in levels_that_override_global
]
if not global_overriding_level:
return 'global'
levels.remove('global')
return ", ".join(levels)
class DebugCommand(Command):
"""
Display debug information.
@ -90,6 +112,7 @@ class DebugCommand(Command):
cmd_opts = self.cmd_opts
cmdoptions.add_target_python_options(cmd_opts)
self.parser.insert_option_group(0, cmd_opts)
self.parser.config.load()
def run(self, options, args):
# type: (Values, List[Any]) -> int
@ -110,6 +133,11 @@ class DebugCommand(Command):
show_value('sys.platform', sys.platform)
show_sys_implementation()
show_value("'cert' config value", ca_bundle_info(self.parser.config))
show_value("REQUESTS_CA_BUNDLE", os.environ.get('REQUESTS_CA_BUNDLE'))
show_value("CURL_CA_BUNDLE", os.environ.get('CURL_CA_BUNDLE'))
show_value("pip._vendor.certifi.where()", where())
show_tags(options)
return SUCCESS

View file

@ -10,7 +10,7 @@ from pip._internal.cli import cmdoptions
from pip._internal.cli.cmdoptions import make_target_python
from pip._internal.cli.req_command import RequirementCommand
from pip._internal.req import RequirementSet
from pip._internal.req.req_tracker import RequirementTracker
from pip._internal.req.req_tracker import get_requirement_tracker
from pip._internal.utils.filesystem import check_path_owner
from pip._internal.utils.misc import ensure_dir, normalize_path, write_output
from pip._internal.utils.temp_dir import TempDirectory
@ -111,13 +111,11 @@ class DownloadCommand(RequirementCommand):
)
options.cache_dir = None
with RequirementTracker() as req_tracker, TempDirectory(
with get_requirement_tracker() as req_tracker, TempDirectory(
options.build_dir, delete=build_delete, kind="download"
) as directory:
requirement_set = RequirementSet(
require_hashes=options.require_hashes,
)
requirement_set = RequirementSet()
self.populate_requirement_set(
requirement_set,
args,
@ -131,16 +129,21 @@ class DownloadCommand(RequirementCommand):
temp_build_dir=directory,
options=options,
req_tracker=req_tracker,
session=session,
finder=finder,
download_dir=options.download_dir,
use_user_site=False,
)
resolver = self.make_resolver(
preparer=preparer,
finder=finder,
session=session,
options=options,
py_version_info=options.python_version,
)
self.trace_basic_info(finder)
resolver.resolve(requirement_set)
downloaded = ' '.join([

View file

@ -1,4 +1,3 @@
# The following comment should be removed at some point in the future.
# It's included for now because without it InstallCommand.run() has a
# couple errors where we have to know req.name is str rather than
@ -13,6 +12,7 @@ import logging
import operator
import os
import shutil
import site
from optparse import SUPPRESS_HELP
from pip._vendor import pkg_resources
@ -31,8 +31,8 @@ from pip._internal.exceptions import (
from pip._internal.locations import distutils_scheme
from pip._internal.operations.check import check_install_conflicts
from pip._internal.req import RequirementSet, install_given_reqs
from pip._internal.req.req_tracker import RequirementTracker
from pip._internal.utils.filesystem import check_path_owner
from pip._internal.req.req_tracker import get_requirement_tracker
from pip._internal.utils.filesystem import check_path_owner, test_writable_dir
from pip._internal.utils.misc import (
ensure_dir,
get_installed_version,
@ -42,7 +42,7 @@ from pip._internal.utils.misc import (
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.virtualenv import virtualenv_no_global
from pip._internal.wheel import WheelBuilder
from pip._internal.wheel_builder import WheelBuilder
if MYPY_CHECK_RUNNING:
from optparse import Values
@ -50,7 +50,7 @@ if MYPY_CHECK_RUNNING:
from pip._internal.models.format_control import FormatControl
from pip._internal.req.req_install import InstallRequirement
from pip._internal.wheel import BinaryAllowedPredicate
from pip._internal.wheel_builder import BinaryAllowedPredicate
logger = logging.getLogger(__name__)
@ -102,6 +102,8 @@ def get_check_binary_allowed(format_control):
# type: (FormatControl) -> BinaryAllowedPredicate
def check_binary_allowed(req):
# type: (InstallRequirement) -> bool
if req.use_pep517:
return True
canonical_name = canonicalize_name(req.name)
allowed_formats = format_control.get_allowed_formats(canonical_name)
return "binary" in allowed_formats
@ -291,19 +293,14 @@ class InstallCommand(RequirementCommand):
options.src_dir = os.path.abspath(options.src_dir)
install_options = options.install_options or []
if options.use_user_site:
if options.prefix_path:
raise CommandError(
"Can not combine '--user' and '--prefix' as they imply "
"different installation locations"
)
if virtualenv_no_global():
raise InstallationError(
"Can not perform a '--user' install. User site-packages "
"are not visible in this virtualenv."
)
install_options.append('--user')
install_options.append('--prefix=')
options.use_user_site = decide_user_install(
options.use_user_site,
prefix_path=options.prefix_path,
target_dir=options.target_dir,
root_path=options.root_path,
isolated_mode=options.isolated_mode,
)
target_temp_dir = None # type: Optional[TempDirectory]
target_temp_dir_path = None # type: Optional[str]
@ -320,7 +317,6 @@ class InstallCommand(RequirementCommand):
# Create a target directory for using with the target option
target_temp_dir = TempDirectory(kind="target")
target_temp_dir_path = target_temp_dir.path
install_options.append('--home=' + target_temp_dir_path)
global_options = options.global_options or []
@ -347,11 +343,10 @@ class InstallCommand(RequirementCommand):
)
options.cache_dir = None
with RequirementTracker() as req_tracker, TempDirectory(
with get_requirement_tracker() as req_tracker, TempDirectory(
options.build_dir, delete=build_delete, kind="install"
) as directory:
requirement_set = RequirementSet(
require_hashes=options.require_hashes,
check_supported_wheels=not options.target_dir,
)
@ -364,11 +359,13 @@ class InstallCommand(RequirementCommand):
temp_build_dir=directory,
options=options,
req_tracker=req_tracker,
session=session,
finder=finder,
use_user_site=options.use_user_site,
)
resolver = self.make_resolver(
preparer=preparer,
finder=finder,
session=session,
options=options,
wheel_cache=wheel_cache,
use_user_site=options.use_user_site,
@ -378,6 +375,9 @@ class InstallCommand(RequirementCommand):
upgrade_strategy=upgrade_strategy,
use_pep517=options.use_pep517,
)
self.trace_basic_info(finder)
resolver.resolve(requirement_set)
try:
@ -463,13 +463,13 @@ class InstallCommand(RequirementCommand):
)
working_set = pkg_resources.WorkingSet(lib_locations)
reqs = sorted(installed, key=operator.attrgetter('name'))
installed.sort(key=operator.attrgetter('name'))
items = []
for req in reqs:
item = req.name
for result in installed:
item = result.name
try:
installed_version = get_installed_version(
req.name, working_set=working_set
result.name, working_set=working_set
)
if installed_version:
item += '-' + installed_version
@ -594,6 +594,72 @@ def get_lib_location_guesses(*args, **kwargs):
return [scheme['purelib'], scheme['platlib']]
def site_packages_writable(**kwargs):
return all(
test_writable_dir(d) for d in set(get_lib_location_guesses(**kwargs))
)
def decide_user_install(
use_user_site, # type: Optional[bool]
prefix_path=None, # type: Optional[str]
target_dir=None, # type: Optional[str]
root_path=None, # type: Optional[str]
isolated_mode=False, # type: bool
):
# type: (...) -> bool
"""Determine whether to do a user install based on the input options.
If use_user_site is False, no additional checks are done.
If use_user_site is True, it is checked for compatibility with other
options.
If use_user_site is None, the default behaviour depends on the environment,
which is provided by the other arguments.
"""
# In some cases (config from tox), use_user_site can be set to an integer
# rather than a bool, which 'use_user_site is False' wouldn't catch.
if (use_user_site is not None) and (not use_user_site):
logger.debug("Non-user install by explicit request")
return False
if use_user_site:
if prefix_path:
raise CommandError(
"Can not combine '--user' and '--prefix' as they imply "
"different installation locations"
)
if virtualenv_no_global():
raise InstallationError(
"Can not perform a '--user' install. User site-packages "
"are not visible in this virtualenv."
)
logger.debug("User install by explicit request")
return True
# If we are here, user installs have not been explicitly requested/avoided
assert use_user_site is None
# user install incompatible with --prefix/--target
if prefix_path or target_dir:
logger.debug("Non-user install due to --prefix or --target option")
return False
# If user installs are not enabled, choose a non-user install
if not site.ENABLE_USER_SITE:
logger.debug("Non-user install because user site-packages disabled")
return False
# If we have permission for a non-user install, do that,
# otherwise do a user install.
if site_packages_writable(root=root_path, isolated=isolated_mode):
logger.debug("Non-user install because site-packages writeable")
return False
logger.info("Defaulting to user installation because normal site-packages "
"is not writeable")
return True
def create_env_error_message(error, show_traceback, using_user_site):
"""Format an error message for an EnvironmentError

View file

@ -12,7 +12,7 @@ from pip._vendor.six.moves import zip_longest
from pip._internal.cli import cmdoptions
from pip._internal.cli.req_command import IndexGroupCommand
from pip._internal.exceptions import CommandError
from pip._internal.index import PackageFinder
from pip._internal.index.package_finder import PackageFinder
from pip._internal.models.selection_prefs import SelectionPreferences
from pip._internal.self_outdated_check import make_link_collector
from pip._internal.utils.misc import (

View file

@ -13,10 +13,10 @@ from pip._internal.cli import cmdoptions
from pip._internal.cli.req_command import RequirementCommand
from pip._internal.exceptions import CommandError, PreviousBuildDirError
from pip._internal.req import RequirementSet
from pip._internal.req.req_tracker import RequirementTracker
from pip._internal.req.req_tracker import get_requirement_tracker
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.wheel import WheelBuilder
from pip._internal.wheel_builder import WheelBuilder
if MYPY_CHECK_RUNNING:
from optparse import Values
@ -102,6 +102,16 @@ class WheelCommand(RequirementCommand):
cmd_opts.add_option(cmdoptions.no_clean())
cmd_opts.add_option(cmdoptions.require_hashes())
cmd_opts.add_option(
'--save-wheel-names',
dest='path_to_wheelnames',
action='store',
metavar='path',
help=("Store the filenames of the built or downloaded wheels "
"in a new file of given path. Filenames are separated "
"by new line and file ends with new line"),
)
index_opts = cmdoptions.make_option_group(
cmdoptions.index_group,
self.parser,
@ -110,6 +120,28 @@ class WheelCommand(RequirementCommand):
self.parser.insert_option_group(0, index_opts)
self.parser.insert_option_group(0, cmd_opts)
def save_wheelnames(
self,
links_filenames,
path_to_wheelnames,
wheel_filenames,
):
if path_to_wheelnames is None:
return
entries_to_save = wheel_filenames + links_filenames
entries_to_save = [
filename + '\n' for filename in entries_to_save
if filename.endswith('whl')
]
try:
with open(path_to_wheelnames, 'w') as f:
f.writelines(entries_to_save)
except EnvironmentError as e:
logger.error('Cannot write to the given path: %s\n%s' %
(path_to_wheelnames, e))
raise
def run(self, options, args):
# type: (Values, List[Any]) -> None
cmdoptions.check_install_build_global(options)
@ -125,13 +157,11 @@ class WheelCommand(RequirementCommand):
build_delete = (not (options.no_clean or options.build_dir))
wheel_cache = WheelCache(options.cache_dir, options.format_control)
with RequirementTracker() as req_tracker, TempDirectory(
with get_requirement_tracker() as req_tracker, TempDirectory(
options.build_dir, delete=build_delete, kind="wheel"
) as directory:
requirement_set = RequirementSet(
require_hashes=options.require_hashes,
)
requirement_set = RequirementSet()
try:
self.populate_requirement_set(
@ -143,18 +173,23 @@ class WheelCommand(RequirementCommand):
temp_build_dir=directory,
options=options,
req_tracker=req_tracker,
session=session,
finder=finder,
wheel_download_dir=options.wheel_dir,
use_user_site=False,
)
resolver = self.make_resolver(
preparer=preparer,
finder=finder,
session=session,
options=options,
wheel_cache=wheel_cache,
ignore_requires_python=options.ignore_requires_python,
use_pep517=options.use_pep517,
)
self.trace_basic_info(finder)
resolver.resolve(requirement_set)
# build wheels
@ -162,11 +197,18 @@ class WheelCommand(RequirementCommand):
preparer, wheel_cache,
build_options=options.build_options or [],
global_options=options.global_options or [],
no_clean=options.no_clean,
path_to_wheelnames=options.path_to_wheelnames
)
build_failures = wb.build(
requirement_set.requirements.values(),
)
self.save_wheelnames(
[req.link.filename for req in
requirement_set.successfully_downloaded
if req.link is not None],
wb.path_to_wheelnames,
wb.wheel_filenames,
)
if len(build_failures) != 0:
raise CommandError(
"Failed to build one or more wheels"

View file

@ -13,7 +13,6 @@ Some terminology:
# The following comment should be removed at some point in the future.
# mypy: strict-optional=False
# mypy: disallow-untyped-defs=False
import locale
import logging
@ -78,6 +77,7 @@ CONFIG_BASENAME = 'pip.ini' if WINDOWS else 'pip.conf'
def get_configuration_files():
# type: () -> Dict[Kind, List[str]]
global_config_files = [
os.path.join(path, CONFIG_BASENAME)
for path in appdirs.site_config_dirs('pip')

View file

@ -1,4 +1,4 @@
from pip._internal.distributions.source.legacy import SourceDistribution
from pip._internal.distributions.source import SourceDistribution
from pip._internal.distributions.wheel import WheelDistribution
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

View file

@ -16,28 +16,21 @@ class SourceDistribution(AbstractDistribution):
The preparation step for these needs metadata for the packages to be
generated, either using PEP 517 or using the legacy `setup.py egg_info`.
NOTE from @pradyunsg (14 June 2019)
I expect SourceDistribution class will need to be split into
`legacy_source` (setup.py based) and `source` (PEP 517 based) when we start
bringing logic for preparation out of InstallRequirement into this class.
"""
def get_pkg_resources_distribution(self):
return self.req.get_dist()
def prepare_distribution_metadata(self, finder, build_isolation):
# Prepare for building. We need to:
# 1. Load pyproject.toml (if it exists)
# 2. Set up the build environment
# Load pyproject.toml, to determine whether PEP 517 is to be used
self.req.load_pyproject_toml()
# Set up the build isolation, if this requirement should be isolated
should_isolate = self.req.use_pep517 and build_isolation
if should_isolate:
self._setup_isolation(finder)
self.req.prepare_metadata()
self.req.assert_source_matches_version()
def _setup_isolation(self, finder):
def _raise_conflicts(conflicting_with, conflicting_reqs):
@ -49,7 +42,7 @@ class SourceDistribution(AbstractDistribution):
requirement=self.req,
conflicting_with=conflicting_with,
description=', '.join(
'%s is incompatible with %s' % (installed, wanted)
'{} is incompatible with {}'.format(installed, wanted)
for installed, wanted in sorted(conflicting)
)
)

View file

@ -1,578 +0,0 @@
# The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False
from __future__ import absolute_import
import cgi
import logging
import mimetypes
import os
import re
import shutil
import sys
from pip._vendor import requests
from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
from pip._vendor.six import PY2
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._internal.exceptions import HashMismatch, InstallationError
from pip._internal.models.index import PyPI
from pip._internal.network.session import PipSession
from pip._internal.utils.encoding import auto_decode
from pip._internal.utils.filesystem import copy2_fixed
from pip._internal.utils.misc import (
ask_path_exists,
backup_dir,
consume,
display_path,
format_size,
hide_url,
path_to_display,
rmtree,
splitext,
)
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.ui import DownloadProgressProvider
from pip._internal.utils.unpacking import unpack_file
from pip._internal.utils.urls import get_url_scheme
from pip._internal.vcs import vcs
if MYPY_CHECK_RUNNING:
from typing import (
IO, Callable, List, Optional, Text, Tuple,
)
from mypy_extensions import TypedDict
from pip._internal.models.link import Link
from pip._internal.utils.hashes import Hashes
from pip._internal.vcs.versioncontrol import VersionControl
if PY2:
CopytreeKwargs = TypedDict(
'CopytreeKwargs',
{
'ignore': Callable[[str, List[str]], List[str]],
'symlinks': bool,
},
total=False,
)
else:
CopytreeKwargs = TypedDict(
'CopytreeKwargs',
{
'copy_function': Callable[[str, str], None],
'ignore': Callable[[str, List[str]], List[str]],
'ignore_dangling_symlinks': bool,
'symlinks': bool,
},
total=False,
)
__all__ = ['get_file_content',
'unpack_vcs_link',
'unpack_file_url',
'unpack_http_url', 'unpack_url',
'parse_content_disposition', 'sanitize_content_filename']
logger = logging.getLogger(__name__)
def get_file_content(url, comes_from=None, session=None):
# type: (str, Optional[str], Optional[PipSession]) -> Tuple[str, Text]
"""Gets the content of a file; it may be a filename, file: URL, or
http: URL. Returns (location, content). Content is unicode.
:param url: File path or url.
:param comes_from: Origin description of requirements.
:param session: Instance of pip.download.PipSession.
"""
if session is None:
raise TypeError(
"get_file_content() missing 1 required keyword argument: 'session'"
)
scheme = get_url_scheme(url)
if scheme in ['http', 'https']:
# FIXME: catch some errors
resp = session.get(url)
resp.raise_for_status()
return resp.url, resp.text
elif scheme == 'file':
if comes_from and comes_from.startswith('http'):
raise InstallationError(
'Requirements file %s references URL %s, which is local'
% (comes_from, url))
path = url.split(':', 1)[1]
path = path.replace('\\', '/')
match = _url_slash_drive_re.match(path)
if match:
path = match.group(1) + ':' + path.split('|', 1)[1]
path = urllib_parse.unquote(path)
if path.startswith('/'):
path = '/' + path.lstrip('/')
url = path
try:
with open(url, 'rb') as f:
content = auto_decode(f.read())
except IOError as exc:
raise InstallationError(
'Could not open requirements file: %s' % str(exc)
)
return url, content
_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I)
def unpack_vcs_link(link, location):
# type: (Link, str) -> None
vcs_backend = _get_used_vcs_backend(link)
assert vcs_backend is not None
vcs_backend.unpack(location, url=hide_url(link.url))
def _get_used_vcs_backend(link):
# type: (Link) -> Optional[VersionControl]
"""
Return a VersionControl object or None.
"""
for vcs_backend in vcs.backends:
if link.scheme in vcs_backend.schemes:
return vcs_backend
return None
def _progress_indicator(iterable, *args, **kwargs):
return iterable
def _download_url(
resp, # type: Response
link, # type: Link
content_file, # type: IO
hashes, # type: Optional[Hashes]
progress_bar # type: str
):
# type: (...) -> None
try:
total_length = int(resp.headers['content-length'])
except (ValueError, KeyError, TypeError):
total_length = 0
cached_resp = getattr(resp, "from_cache", False)
if logger.getEffectiveLevel() > logging.INFO:
show_progress = False
elif cached_resp:
show_progress = False
elif total_length > (40 * 1000):
show_progress = True
elif not total_length:
show_progress = True
else:
show_progress = False
show_url = link.show_url
def resp_read(chunk_size):
try:
# Special case for urllib3.
for chunk in resp.raw.stream(
chunk_size,
# We use decode_content=False here because we don't
# want urllib3 to mess with the raw bytes we get
# from the server. If we decompress inside of
# urllib3 then we cannot verify the checksum
# because the checksum will be of the compressed
# file. This breakage will only occur if the
# server adds a Content-Encoding header, which
# depends on how the server was configured:
# - Some servers will notice that the file isn't a
# compressible file and will leave the file alone
# and with an empty Content-Encoding
# - Some servers will notice that the file is
# already compressed and will leave the file
# alone and will add a Content-Encoding: gzip
# header
# - Some servers won't notice anything at all and
# will take a file that's already been compressed
# and compress it again and set the
# Content-Encoding: gzip header
#
# By setting this not to decode automatically we
# hope to eliminate problems with the second case.
decode_content=False):
yield chunk
except AttributeError:
# Standard file-like object.
while True:
chunk = resp.raw.read(chunk_size)
if not chunk:
break
yield chunk
def written_chunks(chunks):
for chunk in chunks:
content_file.write(chunk)
yield chunk
progress_indicator = _progress_indicator
if link.netloc == PyPI.netloc:
url = show_url
else:
url = link.url_without_fragment
if show_progress: # We don't show progress on cached responses
progress_indicator = DownloadProgressProvider(progress_bar,
max=total_length)
if total_length:
logger.info("Downloading %s (%s)", url, format_size(total_length))
else:
logger.info("Downloading %s", url)
elif cached_resp:
logger.info("Using cached %s", url)
else:
logger.info("Downloading %s", url)
downloaded_chunks = written_chunks(
progress_indicator(
resp_read(CONTENT_CHUNK_SIZE),
CONTENT_CHUNK_SIZE
)
)
if hashes:
hashes.check_against_chunks(downloaded_chunks)
else:
consume(downloaded_chunks)
def _copy_file(filename, location, link):
copy = True
download_location = os.path.join(location, link.filename)
if os.path.exists(download_location):
response = ask_path_exists(
'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)abort' %
display_path(download_location), ('i', 'w', 'b', 'a'))
if response == 'i':
copy = False
elif response == 'w':
logger.warning('Deleting %s', display_path(download_location))
os.remove(download_location)
elif response == 'b':
dest_file = backup_dir(download_location)
logger.warning(
'Backing up %s to %s',
display_path(download_location),
display_path(dest_file),
)
shutil.move(download_location, dest_file)
elif response == 'a':
sys.exit(-1)
if copy:
shutil.copy(filename, download_location)
logger.info('Saved %s', display_path(download_location))
def unpack_http_url(
link, # type: Link
location, # type: str
download_dir=None, # type: Optional[str]
session=None, # type: Optional[PipSession]
hashes=None, # type: Optional[Hashes]
progress_bar="on" # type: str
):
# type: (...) -> None
if session is None:
raise TypeError(
"unpack_http_url() missing 1 required keyword argument: 'session'"
)
with TempDirectory(kind="unpack") as temp_dir:
# If a download dir is specified, is the file already downloaded there?
already_downloaded_path = None
if download_dir:
already_downloaded_path = _check_download_dir(link,
download_dir,
hashes)
if already_downloaded_path:
from_path = already_downloaded_path
content_type = mimetypes.guess_type(from_path)[0]
else:
# let's download to a tmp dir
from_path, content_type = _download_http_url(link,
session,
temp_dir.path,
hashes,
progress_bar)
# unpack the archive to the build dir location. even when only
# downloading archives, they have to be unpacked to parse dependencies
unpack_file(from_path, location, content_type)
# a download dir is specified; let's copy the archive there
if download_dir and not already_downloaded_path:
_copy_file(from_path, download_dir, link)
if not already_downloaded_path:
os.unlink(from_path)
def _copy2_ignoring_special_files(src, dest):
# type: (str, str) -> None
"""Copying special files is not supported, but as a convenience to users
we skip errors copying them. This supports tools that may create e.g.
socket files in the project source directory.
"""
try:
copy2_fixed(src, dest)
except shutil.SpecialFileError as e:
# SpecialFileError may be raised due to either the source or
# destination. If the destination was the cause then we would actually
# care, but since the destination directory is deleted prior to
# copy we ignore all of them assuming it is caused by the source.
logger.warning(
"Ignoring special file error '%s' encountered copying %s to %s.",
str(e),
path_to_display(src),
path_to_display(dest),
)
def _copy_source_tree(source, target):
# type: (str, str) -> None
def ignore(d, names):
# Pulling in those directories can potentially be very slow,
# exclude the following directories if they appear in the top
# level dir (and only it).
# See discussion at https://github.com/pypa/pip/pull/6770
return ['.tox', '.nox'] if d == source else []
kwargs = dict(ignore=ignore, symlinks=True) # type: CopytreeKwargs
if not PY2:
# Python 2 does not support copy_function, so we only ignore
# errors on special file copy in Python 3.
kwargs['copy_function'] = _copy2_ignoring_special_files
shutil.copytree(source, target, **kwargs)
def unpack_file_url(
link, # type: Link
location, # type: str
download_dir=None, # type: Optional[str]
hashes=None # type: Optional[Hashes]
):
# type: (...) -> None
"""Unpack link into location.
If download_dir is provided and link points to a file, make a copy
of the link file inside download_dir.
"""
link_path = link.file_path
# If it's a url to a local directory
if link.is_existing_dir():
if os.path.isdir(location):
rmtree(location)
_copy_source_tree(link_path, location)
if download_dir:
logger.info('Link is a directory, ignoring download_dir')
return
# If --require-hashes is off, `hashes` is either empty, the
# link's embedded hash, or MissingHashes; it is required to
# match. If --require-hashes is on, we are satisfied by any
# hash in `hashes` matching: a URL-based or an option-based
# one; no internet-sourced hash will be in `hashes`.
if hashes:
hashes.check_against_path(link_path)
# If a download dir is specified, is the file already there and valid?
already_downloaded_path = None
if download_dir:
already_downloaded_path = _check_download_dir(link,
download_dir,
hashes)
if already_downloaded_path:
from_path = already_downloaded_path
else:
from_path = link_path
content_type = mimetypes.guess_type(from_path)[0]
# unpack the archive to the build dir location. even when only downloading
# archives, they have to be unpacked to parse dependencies
unpack_file(from_path, location, content_type)
# a download dir is specified and not already downloaded
if download_dir and not already_downloaded_path:
_copy_file(from_path, download_dir, link)
def unpack_url(
link, # type: Link
location, # type: str
download_dir=None, # type: Optional[str]
session=None, # type: Optional[PipSession]
hashes=None, # type: Optional[Hashes]
progress_bar="on" # type: str
):
# type: (...) -> None
"""Unpack link.
If link is a VCS link:
if only_download, export into download_dir and ignore location
else unpack into location
for other types of link:
- unpack into location
- if download_dir, copy the file into download_dir
- if only_download, mark location for deletion
:param hashes: A Hashes object, one of whose embedded hashes must match,
or HashMismatch will be raised. If the Hashes is empty, no matches are
required, and unhashable types of requirements (like VCS ones, which
would ordinarily raise HashUnsupported) are allowed.
"""
# non-editable vcs urls
if link.is_vcs:
unpack_vcs_link(link, location)
# file urls
elif link.is_file:
unpack_file_url(link, location, download_dir, hashes=hashes)
# http urls
else:
if session is None:
session = PipSession()
unpack_http_url(
link,
location,
download_dir,
session,
hashes=hashes,
progress_bar=progress_bar
)
def sanitize_content_filename(filename):
# type: (str) -> str
"""
Sanitize the "filename" value from a Content-Disposition header.
"""
return os.path.basename(filename)
def parse_content_disposition(content_disposition, default_filename):
# type: (str, str) -> str
"""
Parse the "filename" value from a Content-Disposition header, and
return the default filename if the result is empty.
"""
_type, params = cgi.parse_header(content_disposition)
filename = params.get('filename')
if filename:
# We need to sanitize the filename to prevent directory traversal
# in case the filename contains ".." path parts.
filename = sanitize_content_filename(filename)
return filename or default_filename
def _download_http_url(
link, # type: Link
session, # type: PipSession
temp_dir, # type: str
hashes, # type: Optional[Hashes]
progress_bar # type: str
):
# type: (...) -> Tuple[str, str]
"""Download link url into temp_dir using provided session"""
target_url = link.url.split('#', 1)[0]
try:
resp = session.get(
target_url,
# We use Accept-Encoding: identity here because requests
# defaults to accepting compressed responses. This breaks in
# a variety of ways depending on how the server is configured.
# - Some servers will notice that the file isn't a compressible
# file and will leave the file alone and with an empty
# Content-Encoding
# - Some servers will notice that the file is already
# compressed and will leave the file alone and will add a
# Content-Encoding: gzip header
# - Some servers won't notice anything at all and will take
# a file that's already been compressed and compress it again
# and set the Content-Encoding: gzip header
# By setting this to request only the identity encoding We're
# hoping to eliminate the third case. Hopefully there does not
# exist a server which when given a file will notice it is
# already compressed and that you're not asking for a
# compressed file and will then decompress it before sending
# because if that's the case I don't think it'll ever be
# possible to make this work.
headers={"Accept-Encoding": "identity"},
stream=True,
)
resp.raise_for_status()
except requests.HTTPError as exc:
logger.critical(
"HTTP error %s while getting %s", exc.response.status_code, link,
)
raise
content_type = resp.headers.get('content-type', '')
filename = link.filename # fallback
# Have a look at the Content-Disposition header for a better guess
content_disposition = resp.headers.get('content-disposition')
if content_disposition:
filename = parse_content_disposition(content_disposition, filename)
ext = splitext(filename)[1] # type: Optional[str]
if not ext:
ext = mimetypes.guess_extension(content_type)
if ext:
filename += ext
if not ext and link.url != resp.url:
ext = os.path.splitext(resp.url)[1]
if ext:
filename += ext
file_path = os.path.join(temp_dir, filename)
with open(file_path, 'wb') as content_file:
_download_url(resp, link, content_file, hashes, progress_bar)
return file_path, content_type
def _check_download_dir(link, download_dir, hashes):
# type: (Link, str, Optional[Hashes]) -> Optional[str]
""" Check download_dir for previously downloaded file with correct hash
If a correct file is found return its path else None
"""
download_path = os.path.join(download_dir, link.filename)
if not os.path.exists(download_path):
return None
# If already downloaded, does its hash match?
logger.info('File was already downloaded %s', download_path)
if hashes:
try:
hashes.check_against_path(download_path)
except HashMismatch:
logger.warning(
'Previously-downloaded file %s has bad hash. '
'Re-downloading.',
download_path
)
os.unlink(download_path)
return None
return download_path

View file

@ -0,0 +1,2 @@
"""Index interaction code
"""

View file

@ -2,9 +2,6 @@
The main purpose of this module is to expose LinkCollector.collect_links().
"""
# The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False
import cgi
import itertools
import logging
@ -27,8 +24,8 @@ from pip._internal.vcs import is_url, vcs
if MYPY_CHECK_RUNNING:
from typing import (
Callable, Dict, Iterable, List, MutableMapping, Optional, Sequence,
Tuple, Union,
Callable, Iterable, List, MutableMapping, Optional, Sequence, Tuple,
Union,
)
import xml.etree.ElementTree
@ -290,6 +287,7 @@ class HTMLPage(object):
self.url = url
def __str__(self):
# type: () -> str
return redact_auth_from_url(self.url)
@ -385,6 +383,7 @@ def group_locations(locations, expand_dir=False):
# puts the url for the given file path into the appropriate list
def sort_path(path):
# type: (str) -> None
url = path_to_url(path)
if mimetypes.guess_type(url, strict=False)[0] == 'text/html':
urls.append(url)
@ -435,29 +434,36 @@ def group_locations(locations, expand_dir=False):
class CollectedLinks(object):
"""
Encapsulates all the Link objects collected by a call to
LinkCollector.collect_links(), stored separately as--
Encapsulates the return value of a call to LinkCollector.collect_links().
The return value includes both URLs to project pages containing package
links, as well as individual package Link objects collected from other
sources.
This info is stored separately as:
(1) links from the configured file locations,
(2) links from the configured find_links, and
(3) a dict mapping HTML page url to links from that page.
(3) urls to HTML project pages, as described by the PEP 503 simple
repository API.
"""
def __init__(
self,
files, # type: List[Link]
find_links, # type: List[Link]
pages, # type: Dict[str, List[Link]]
files, # type: List[Link]
find_links, # type: List[Link]
project_urls, # type: List[Link]
):
# type: (...) -> None
"""
:param files: Links from file locations.
:param find_links: Links from find_links.
:param pages: A dict mapping HTML page url to links from that page.
:param project_urls: URLs to HTML project pages, as described by
the PEP 503 simple repository API.
"""
self.files = files
self.find_links = find_links
self.pages = pages
self.project_urls = project_urls
class LinkCollector(object):
@ -483,18 +489,12 @@ class LinkCollector(object):
# type: () -> List[str]
return self.search_scope.find_links
def _get_pages(self, locations):
# type: (Iterable[Link]) -> Iterable[HTMLPage]
def fetch_page(self, location):
# type: (Link) -> Optional[HTMLPage]
"""
Yields (page, page_url) from the given locations, skipping
locations that have errors.
Fetch an HTML page containing package links.
"""
for location in locations:
page = _get_html_page(location, session=self.session)
if page is None:
continue
yield page
return _get_html_page(location, session=self.session)
def collect_links(self, project_name):
# type: (str) -> CollectedLinks
@ -537,12 +537,8 @@ class LinkCollector(object):
lines.append('* {}'.format(link))
logger.debug('\n'.join(lines))
pages_links = {}
for page in self._get_pages(url_locations):
pages_links[page.url] = list(parse_links(page))
return CollectedLinks(
files=file_links,
find_links=find_link_links,
pages=pages_links,
project_urls=url_locations,
)

View file

@ -19,6 +19,7 @@ from pip._internal.exceptions import (
InvalidWheelFilename,
UnsupportedWheel,
)
from pip._internal.index.collector import parse_links
from pip._internal.models.candidate import InstallationCandidate
from pip._internal.models.format_control import FormatControl
from pip._internal.models.link import Link
@ -38,7 +39,7 @@ if MYPY_CHECK_RUNNING:
FrozenSet, Iterable, List, Optional, Set, Text, Tuple, Union,
)
from pip._vendor.packaging.version import _BaseVersion
from pip._internal.collector import LinkCollector
from pip._internal.index.collector import LinkCollector
from pip._internal.models.search_scope import SearchScope
from pip._internal.req import InstallRequirement
from pip._internal.pep425tags import Pep425Tag
@ -115,7 +116,7 @@ class LinkEvaluator(object):
self,
project_name, # type: str
canonical_name, # type: str
formats, # type: FrozenSet
formats, # type: FrozenSet[str]
target_python, # type: TargetPython
allow_yanked, # type: bool
ignore_requires_python=None, # type: Optional[bool]
@ -473,12 +474,14 @@ class CandidateEvaluator(object):
c for c in candidates if str(c.version) in versions
]
return filter_unallowed_hashes(
filtered_applicable_candidates = filter_unallowed_hashes(
candidates=applicable_candidates,
hashes=self._hashes,
project_name=self._project_name,
)
return sorted(filtered_applicable_candidates, key=self._sort_key)
def _sort_key(self, candidate):
# type: (InstallationCandidate) -> CandidateSortingKey
"""
@ -758,7 +761,7 @@ class PackageFinder(object):
return None
return InstallationCandidate(
project=link_evaluator.project_name,
name=link_evaluator.project_name,
link=link,
# Convert the Text result to str since InstallationCandidate
# accepts str.
@ -778,6 +781,25 @@ class PackageFinder(object):
return candidates
def process_project_url(self, project_url, link_evaluator):
# type: (Link, LinkEvaluator) -> List[InstallationCandidate]
logger.debug(
'Fetching project page and analyzing links: %s', project_url,
)
html_page = self._link_collector.fetch_page(project_url)
if html_page is None:
return []
page_links = list(parse_links(html_page))
with indent_log():
package_links = self.evaluate_links(
link_evaluator,
links=page_links,
)
return package_links
def find_all_candidates(self, project_name):
# type: (str) -> List[InstallationCandidate]
"""Find all available InstallationCandidate for project_name
@ -798,14 +820,11 @@ class PackageFinder(object):
)
page_versions = []
for page_url, page_links in collected_links.pages.items():
logger.debug('Analyzing links from page %s', page_url)
with indent_log():
new_versions = self.evaluate_links(
link_evaluator,
links=page_links,
)
page_versions.extend(new_versions)
for project_url in collected_links.project_urls:
package_links = self.process_project_url(
project_url, link_evaluator=link_evaluator,
)
page_versions.extend(package_links)
file_versions = self.evaluate_links(
link_evaluator,

View file

@ -45,8 +45,7 @@ if MYPY_CHECK_RUNNING:
from pip._vendor import pkg_resources
from pip._internal.distributions import AbstractDistribution
from pip._internal.network.session import PipSession
from pip._internal.index import PackageFinder
from pip._internal.index.package_finder import PackageFinder
from pip._internal.operations.prepare import RequirementPreparer
from pip._internal.req.req_install import InstallRequirement
from pip._internal.req.req_set import RequirementSet
@ -54,6 +53,7 @@ if MYPY_CHECK_RUNNING:
InstallRequirementProvider = Callable[
[str, InstallRequirement], InstallRequirement
]
DiscoveredDependencies = DefaultDict[str, List[InstallRequirement]]
logger = logging.getLogger(__name__)
@ -116,7 +116,6 @@ class Resolver(object):
def __init__(
self,
preparer, # type: RequirementPreparer
session, # type: PipSession
finder, # type: PackageFinder
make_install_req, # type: InstallRequirementProvider
use_user_site, # type: bool
@ -140,10 +139,6 @@ class Resolver(object):
self.preparer = preparer
self.finder = finder
self.session = session
# This is set in resolve
self.require_hashes = None # type: Optional[bool]
self.upgrade_strategy = upgrade_strategy
self.force_reinstall = force_reinstall
@ -154,7 +149,7 @@ class Resolver(object):
self._make_install_req = make_install_req
self._discovered_dependencies = \
defaultdict(list) # type: DefaultDict[str, List]
defaultdict(list) # type: DiscoveredDependencies
def resolve(self, requirement_set):
# type: (RequirementSet) -> None
@ -178,16 +173,6 @@ class Resolver(object):
requirement_set.unnamed_requirements +
list(requirement_set.requirements.values())
)
self.require_hashes = (
requirement_set.require_hashes or
any(req.has_hash_options for req in root_reqs)
)
# Display where finder is looking for packages
search_scope = self.finder.search_scope
locations = search_scope.get_formatted_locations()
if locations:
logger.info(locations)
# Actually prepare the files, and collect any exceptions. Most hash
# exceptions cannot be checked ahead of time, because
@ -197,9 +182,7 @@ class Resolver(object):
hash_errors = HashErrors()
for req in chain(root_reqs, discovered_reqs):
try:
discovered_reqs.extend(
self._resolve_one(requirement_set, req)
)
discovered_reqs.extend(self._resolve_one(requirement_set, req))
except HashError as exc:
exc.req = req
hash_errors.append(exc)
@ -286,14 +269,8 @@ class Resolver(object):
"""Takes a InstallRequirement and returns a single AbstractDist \
representing a prepared variant of the same.
"""
assert self.require_hashes is not None, (
"require_hashes should have been set in Resolver.resolve()"
)
if req.editable:
return self.preparer.prepare_editable_requirement(
req, self.require_hashes, self.use_user_site, self.finder,
)
return self.preparer.prepare_editable_requirement(req)
# satisfied_by is only evaluated by calling _check_skip_installed,
# so it must be None here.
@ -302,16 +279,15 @@ class Resolver(object):
if req.satisfied_by:
return self.preparer.prepare_installed_requirement(
req, self.require_hashes, skip_reason
req, skip_reason
)
upgrade_allowed = self._is_upgrade_allowed(req)
# We eagerly populate the link, since that's our "legacy" behavior.
req.populate_link(self.finder, upgrade_allowed, self.require_hashes)
abstract_dist = self.preparer.prepare_linked_requirement(
req, self.session, self.finder, self.require_hashes
)
require_hashes = self.preparer.require_hashes
req.populate_link(self.finder, upgrade_allowed, require_hashes)
abstract_dist = self.preparer.prepare_linked_requirement(req)
# NOTE
# The following portion is for determining if a certain package is
@ -344,7 +320,7 @@ class Resolver(object):
def _resolve_one(
self,
requirement_set, # type: RequirementSet
req_to_install # type: InstallRequirement
req_to_install, # type: InstallRequirement
):
# type: (...) -> List[InstallRequirement]
"""Prepare a single requirements file.
@ -397,7 +373,9 @@ class Resolver(object):
# can refer to it when adding dependencies.
if not requirement_set.has_requirement(req_to_install.name):
# 'unnamed' requirements will get added here
req_to_install.is_direct = True
# 'unnamed' requirements can only come from being directly
# provided by the user.
assert req_to_install.is_direct
requirement_set.add_requirement(
req_to_install, parent_req_name=None,
)

View file

@ -2,7 +2,6 @@
# The following comment should be removed at some point in the future.
# mypy: strict-optional=False
# mypy: disallow-untyped-defs=False
from __future__ import absolute_import
@ -14,14 +13,18 @@ import sys
import sysconfig
from distutils import sysconfig as distutils_sysconfig
from distutils.command.install import SCHEME_KEYS # type: ignore
from distutils.command.install import install as distutils_install_command
from pip._internal.models.scheme import Scheme
from pip._internal.utils import appdirs
from pip._internal.utils.compat import WINDOWS
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.typing import MYPY_CHECK_RUNNING, cast
from pip._internal.utils.virtualenv import running_under_virtualenv
if MYPY_CHECK_RUNNING:
from typing import Any, Union, Dict, List, Optional
from typing import Dict, List, Optional, Union
from distutils.cmd import Command as DistutilsCommand
# Application Directories
@ -38,6 +41,7 @@ def get_major_minor_version():
def get_src_prefix():
# type: () -> str
if running_under_virtualenv():
src_prefix = os.path.join(sys.prefix, 'src')
else:
@ -88,29 +92,25 @@ else:
bin_py = '/usr/local/bin'
def distutils_scheme(dist_name, user=False, home=None, root=None,
isolated=False, prefix=None):
# type:(str, bool, str, str, bool, str) -> dict
def distutils_scheme(
dist_name, user=False, home=None, root=None, isolated=False, prefix=None
):
# type:(str, bool, str, str, bool, str) -> Dict[str, str]
"""
Return a distutils install scheme
"""
from distutils.dist import Distribution
scheme = {}
if isolated:
extra_dist_args = {"script_args": ["--no-user-cfg"]}
else:
extra_dist_args = {}
dist_args = {'name': dist_name} # type: Dict[str, Union[str, List[str]]]
dist_args.update(extra_dist_args)
if isolated:
dist_args["script_args"] = ["--no-user-cfg"]
d = Distribution(dist_args)
# Ignoring, typeshed issue reported python/typeshed/issues/2567
d.parse_config_files()
# NOTE: Ignoring type since mypy can't find attributes on 'Command'
i = d.get_command_obj('install', create=True) # type: Any
assert i is not None
obj = None # type: Optional[DistutilsCommand]
obj = d.get_command_obj('install', create=True)
assert obj is not None
i = cast(distutils_install_command, obj)
# NOTE: setting user or home has the side-effect of creating the home dir
# or user base for installations during finalize_options()
# ideally, we'd prefer a scheme class that has no side-effects.
@ -123,6 +123,8 @@ def distutils_scheme(dist_name, user=False, home=None, root=None,
i.home = home or i.home
i.root = root or i.root
i.finalize_options()
scheme = {}
for key in SCHEME_KEYS:
scheme[key] = getattr(i, 'install_' + key)
@ -131,9 +133,7 @@ def distutils_scheme(dist_name, user=False, home=None, root=None,
# platlib). Note, i.install_lib is *always* set after
# finalize_options(); we only want to override here if the user
# has explicitly requested it hence going back to the config
# Ignoring, typeshed issue reported python/typeshed/issues/2567
if 'install_lib' in d.get_option_dict('install'): # type: ignore
if 'install_lib' in d.get_option_dict('install'):
scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))
if running_under_virtualenv():
@ -154,3 +154,41 @@ def distutils_scheme(dist_name, user=False, home=None, root=None,
)
return scheme
def get_scheme(
    dist_name,  # type: str
    user=False,  # type: bool
    home=None,  # type: Optional[str]
    root=None,  # type: Optional[str]
    isolated=False,  # type: bool
    prefix=None,  # type: Optional[str]
):
    # type: (...) -> Scheme
    """Return the install :class:`Scheme` for the given parameters.

    The available schemes mirror the ones described in the distutils
    documentation: https://docs.python.org/3/install/index.html#alternate-installation

    :param dist_name: name of the package the scheme is computed for; it is
        used in the headers scheme path
    :param user: select the "user" scheme
    :param home: select the "home" scheme, rooted at this base directory
    :param root: directory under which all other directories are re-based
    :param isolated: equivalent to --no-user-cfg: skip
        ~/.pydistutils.cfg (posix) / ~/pydistutils.cfg (non-posix) when
        computing scheme paths
    :param prefix: select the "prefix" scheme, rooted at this base directory
    """
    # Delegate the actual path computation to the distutils-based helper,
    # then repackage the resulting dict into the typed Scheme object.
    paths = distutils_scheme(dist_name, user, home, root, isolated, prefix)
    return Scheme(
        platlib=paths["platlib"],
        purelib=paths["purelib"],
        headers=paths["headers"],
        scripts=paths["scripts"],
        data=paths["data"],
    )

View file

@ -9,31 +9,30 @@ from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from pip._vendor.packaging.version import _BaseVersion
from pip._internal.models.link import Link
from typing import Any
class InstallationCandidate(KeyBasedCompareMixin):
"""Represents a potential "candidate" for installation.
"""
def __init__(self, project, version, link):
# type: (Any, str, Link) -> None
self.project = project
def __init__(self, name, version, link):
# type: (str, str, Link) -> None
self.name = name
self.version = parse_version(version) # type: _BaseVersion
self.link = link
super(InstallationCandidate, self).__init__(
key=(self.project, self.version, self.link),
key=(self.name, self.version, self.link),
defining_class=InstallationCandidate
)
def __repr__(self):
# type: () -> str
return "<InstallationCandidate({!r}, {!r}, {!r})>".format(
self.project, self.version, self.link,
self.name, self.version, self.link,
)
def __str__(self):
return '{!r} candidate (version {} at {})'.format(
self.project, self.version, self.link,
self.name, self.version, self.link,
)

View file

@ -1,6 +1,5 @@
# The following comment should be removed at some point in the future.
# mypy: strict-optional=False
# mypy: disallow-untyped-defs=False
from pip._vendor.packaging.utils import canonicalize_name
@ -16,7 +15,7 @@ class FormatControl(object):
"""
def __init__(self, no_binary=None, only_binary=None):
# type: (Optional[Set], Optional[Set]) -> None
# type: (Optional[Set[str]], Optional[Set[str]]) -> None
if no_binary is None:
no_binary = set()
if only_binary is None:
@ -26,12 +25,15 @@ class FormatControl(object):
self.only_binary = only_binary
def __eq__(self, other):
# type: (object) -> bool
return self.__dict__ == other.__dict__
def __ne__(self, other):
# type: (object) -> bool
return not self.__eq__(other)
def __repr__(self):
# type: () -> str
return "{}({}, {})".format(
self.__class__.__name__,
self.no_binary,
@ -40,7 +42,7 @@ class FormatControl(object):
@staticmethod
def handle_mutual_excludes(value, target, other):
# type: (str, Optional[Set], Optional[Set]) -> None
# type: (str, Optional[Set[str]], Optional[Set[str]]) -> None
if value.startswith('-'):
raise CommandError(
"--no-binary / --only-binary option requires 1 argument."
@ -63,7 +65,7 @@ class FormatControl(object):
target.add(name)
def get_allowed_formats(self, canonical_name):
# type: (str) -> FrozenSet
# type: (str) -> FrozenSet[str]
result = {"binary", "source"}
if canonical_name in self.only_binary:
result.discard('source')

View file

@ -19,7 +19,7 @@ from pip._internal.utils.urls import path_to_url, url_to_path
if MYPY_CHECK_RUNNING:
from typing import Optional, Text, Tuple, Union
from pip._internal.collector import HTMLPage
from pip._internal.index.collector import HTMLPage
from pip._internal.utils.hashes import Hashes

View file

@ -0,0 +1,25 @@
"""
For types associated with installation schemes.
For a general overview of available schemes and their context, see
https://docs.python.org/3/install/index.html#alternate-installation.
"""
class Scheme(object):
    """Container for the base directory paths into which the artifacts
    of a Python package are installed (an installation "scheme").
    """

    def __init__(self, platlib, purelib, headers, scripts, data):
        # type: (str, str, str, str, str) -> None
        # Base directory for platform-specific modules (compiled extensions).
        self.platlib = platlib
        # Base directory for pure-Python modules.
        self.purelib = purelib
        # Base directory for C header files.
        self.headers = headers
        # Base directory for executable scripts.
        self.scripts = scripts
        # Base directory for arbitrary data files.
        self.data = data

View file

@ -91,12 +91,12 @@ class TargetPython(object):
# versions=None uses special default logic.
py_version_info = self._given_py_version_info
if py_version_info is None:
versions = None
version = None
else:
versions = [version_info_to_nodot(py_version_info)]
version = version_info_to_nodot(py_version_info)
tags = get_supported(
versions=versions,
version=version,
platform=self.platform,
abi=self.abi,
impl=self.implementation,

View file

@ -9,6 +9,7 @@ from contextlib import contextmanager
from pip._vendor.cachecontrol.cache import BaseCache
from pip._vendor.cachecontrol.caches import FileCache
from pip._vendor.requests.models import Response
from pip._internal.utils.filesystem import adjacent_tmp_file, replace
from pip._internal.utils.misc import ensure_dir
@ -18,6 +19,11 @@ if MYPY_CHECK_RUNNING:
from typing import Optional
def is_from_cache(response):
    # type: (Response) -> bool
    """Return whether ``response`` was served from the local HTTP cache.

    CacheControl sets a ``from_cache`` attribute on cached responses; when
    the attribute is absent the response is treated as not cached.
    """
    from_cache = getattr(response, "from_cache", False)
    return from_cache
@contextmanager
def suppressed_cache_errors():
"""If we can't access the cache then we can just skip caching and process

View file

@ -212,8 +212,9 @@ class LocalFSAdapter(BaseAdapter):
class InsecureHTTPAdapter(HTTPAdapter):
def cert_verify(self, conn, url, verify, cert):
conn.cert_reqs = 'CERT_NONE'
conn.ca_certs = None
super(InsecureHTTPAdapter, self).cert_verify(
conn=conn, url=url, verify=False, cert=cert
)
class PipSession(requests.Session):
@ -360,22 +361,13 @@ class PipSession(requests.Session):
continue
try:
# We need to do this decode dance to ensure that we have a
# unicode object, even on Python 2.x.
addr = ipaddress.ip_address(
origin_host
if (
isinstance(origin_host, six.text_type) or
origin_host is None
)
else origin_host.decode("utf8")
None
if origin_host is None
else six.ensure_text(origin_host)
)
network = ipaddress.ip_network(
secure_host
if isinstance(secure_host, six.text_type)
# setting secure_host to proper Union[bytes, str]
# creates problems in other places
else secure_host.decode("utf8") # type: ignore
six.ensure_text(secure_host)
)
except ValueError:
# We don't have both a valid address or a valid network, so

View file

@ -0,0 +1,48 @@
from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import Iterator
def response_chunks(response, chunk_size=CONTENT_CHUNK_SIZE):
    # type: (Response, int) -> Iterator[bytes]
    """Yield the body of a requests Response in ``chunk_size`` pieces."""
    try:
        # Special case for urllib3: ask for the raw, undecoded bytes.
        #
        # decode_content=False stops urllib3 from decompressing the body.
        # If decompression happened here, the downloaded bytes would no
        # longer match their published checksum whenever the server set a
        # Content-Encoding header. Server behavior varies:
        #   - some leave non-compressible files alone with an empty
        #     Content-Encoding;
        #   - some notice an already-compressed file and merely add a
        #     Content-Encoding: gzip header;
        #   - some re-compress the file and set Content-Encoding: gzip.
        # Disabling automatic decoding is intended to avoid breakage in
        # the second case.
        stream = response.raw.stream(chunk_size, decode_content=False)
        for chunk in stream:
            yield chunk
    except AttributeError:
        # The raw object is a standard file-like object without stream();
        # fall back to plain fixed-size reads until EOF.
        while True:
            data = response.raw.read(chunk_size)
            if not data:
                break
            yield data

View file

@ -0,0 +1,44 @@
"""Metadata generation logic for source distributions.
"""
import atexit
import logging
import os
from pip._internal.utils.subprocess import runner_with_spinner_message
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from pip._internal.req.req_install import InstallRequirement
logger = logging.getLogger(__name__)
def generate_metadata(install_req):
    # type: (InstallRequirement) -> str
    """Generate metadata for ``install_req`` via the PEP 517 hooks.

    Returns the path of the directory holding the generated metadata.
    """
    assert install_req.pep517_backend is not None
    backend = install_req.pep517_backend

    # NOTE: This needs to be refactored to stop using atexit
    tmpdir = TempDirectory(kind="modern-metadata")
    atexit.register(tmpdir.cleanup)

    with install_req.build_env:
        # Pep517HookCaller supplies a fallback implementation of
        # prepare_metadata_for_build_wheel, so the hook is guaranteed
        # to be callable here.
        runner = runner_with_spinner_message("Preparing wheel metadata")
        with backend.subprocess_runner(runner):
            distinfo_dir = backend.prepare_metadata_for_build_wheel(
                tmpdir.path
            )

    return os.path.join(tmpdir.path, distinfo_dir)

View file

@ -1,4 +1,4 @@
"""Metadata generation logic for source distributions.
"""Metadata generation logic for legacy source distributions.
"""
import logging
@ -6,32 +6,19 @@ import os
from pip._internal.exceptions import InstallationError
from pip._internal.utils.misc import ensure_dir
from pip._internal.utils.setuptools_build import make_setuptools_shim_args
from pip._internal.utils.setuptools_build import make_setuptools_egg_info_args
from pip._internal.utils.subprocess import call_subprocess
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.vcs import vcs
if MYPY_CHECK_RUNNING:
from typing import Callable, List
from typing import List, Optional
from pip._internal.req.req_install import InstallRequirement
logger = logging.getLogger(__name__)
def get_metadata_generator(install_req):
# type: (InstallRequirement) -> Callable[[InstallRequirement], str]
"""Return a callable metadata generator for this InstallRequirement.
A metadata generator takes an InstallRequirement (install_req) as an input,
generates metadata via the appropriate process for that install_req and
returns the generated metadata directory.
"""
if not install_req.use_pep517:
return _generate_metadata_legacy
return _generate_metadata
def _find_egg_info(source_directory, is_editable):
# type: (str, bool) -> str
"""Find an .egg-info in `source_directory`, based on `is_editable`.
@ -79,7 +66,7 @@ def _find_egg_info(source_directory, is_editable):
if not filenames:
raise InstallationError(
"Files/directories not found in %s" % base
"Files/directories not found in {}".format(base)
)
# If we have more than one match, we pick the toplevel one. This
@ -91,35 +78,41 @@ def _find_egg_info(source_directory, is_editable):
return os.path.join(base, filenames[0])
def _generate_metadata_legacy(install_req):
def generate_metadata(install_req):
# type: (InstallRequirement) -> str
"""Generate metadata using setup.py-based defacto mechanisms.ArithmeticError
Returns the generated metadata directory.
"""
assert install_req.unpacked_source_directory
req_details_str = install_req.name or "from {}".format(install_req.link)
logger.debug(
'Running setup.py (path:%s) egg_info for package %s',
install_req.setup_py_path, req_details_str,
)
# Compose arguments for subprocess call
base_cmd = make_setuptools_shim_args(install_req.setup_py_path)
if install_req.isolated:
base_cmd += ["--no-user-cfg"]
egg_info_dir = None # type: Optional[str]
# For non-editable installs, don't put the .egg-info files at the root,
# to avoid confusion due to the source code being considered an installed
# egg.
egg_base_option = [] # type: List[str]
if not install_req.editable:
egg_info_dir = os.path.join(
install_req.unpacked_source_directory, 'pip-egg-info',
)
egg_base_option = ['--egg-base', egg_info_dir]
# setuptools complains if the target directory does not exist.
ensure_dir(egg_info_dir)
args = make_setuptools_egg_info_args(
install_req.setup_py_path,
egg_info_dir=egg_info_dir,
no_user_config=install_req.isolated,
)
with install_req.build_env:
call_subprocess(
base_cmd + ["egg_info"] + egg_base_option,
args,
cwd=install_req.unpacked_source_directory,
command_desc='python setup.py egg_info',
)
@ -129,8 +122,3 @@ def _generate_metadata_legacy(install_req):
install_req.unpacked_source_directory,
install_req.editable,
)
def _generate_metadata(install_req):
# type: (InstallRequirement) -> str
return install_req.prepare_pep517_metadata()

Some files were not shown because too many files have changed in this diff Show more