diff --git a/.azure-pipelines/jobs/package.yml b/.azure-pipelines/jobs/package.yml deleted file mode 100644 index bdb0254a1..000000000 --- a/.azure-pipelines/jobs/package.yml +++ /dev/null @@ -1,36 +0,0 @@ -parameters: - vmImage: - -jobs: -- job: Package - dependsOn: - - Test_Primary - - Test_Secondary - pool: - vmImage: ${{ parameters.vmImage }} - - steps: - - task: UsePythonVersion@0 - displayName: Use Python 3 latest - inputs: - versionSpec: '3' - - - bash: | - git config --global user.email "distutils-sig@python.org" - git config --global user.name "pip" - displayName: Setup Git credentials - - - bash: pip install nox - displayName: Install dependencies - - - bash: nox -s prepare-release -- 99.9 - displayName: Prepare dummy release - - - bash: nox -s build-release -- 99.9 - displayName: Generate distributions for the dummy release - - - task: PublishBuildArtifacts@1 - displayName: 'Publish Artifact: dist' - inputs: - pathtoPublish: dist - artifactName: dist diff --git a/.azure-pipelines/jobs/test-windows.yml b/.azure-pipelines/jobs/test-windows.yml deleted file mode 100644 index 99cd8a836..000000000 --- a/.azure-pipelines/jobs/test-windows.yml +++ /dev/null @@ -1,53 +0,0 @@ -parameters: - vmImage: - -jobs: -- job: Test_Primary - displayName: Tests / - - pool: - vmImage: ${{ parameters.vmImage }} - strategy: - matrix: - "3.6": # lowest Python version - python.version: '3.6' - python.architecture: x64 - "3.8": # current - python.version: '3.8' - python.architecture: x64 - maxParallel: 6 - - steps: - - template: ../steps/run-tests-windows.yml - parameters: - runIntegrationTests: true - -- job: Test_Secondary - displayName: Tests / - # Don't run integration tests for these runs - # Run after Test_Primary so we don't devour time and jobs if tests are going to fail - dependsOn: Test_Primary - - pool: - vmImage: ${{ parameters.vmImage }} - strategy: - matrix: - "3.7": - python.version: '3.7' - python.architecture: x64 - # This is for Windows, so test x86 builds - "3.6-x86": - python.version: '3.6' - python.architecture: x86 - "3.7-x86": - python.version: '3.7' - python.architecture: x86 - "3.8-x86": - python.version: '3.8' - python.architecture: x86 - maxParallel: 6 - - steps: - - template: ../steps/run-tests-windows.yml - parameters: - runIntegrationTests: false diff --git a/.azure-pipelines/jobs/test.yml b/.azure-pipelines/jobs/test.yml deleted file mode 100644 index a3a0ef80b..000000000 --- a/.azure-pipelines/jobs/test.yml +++ /dev/null @@ -1,38 +0,0 @@ -parameters: - vmImage: - -jobs: -- job: Test_Primary - displayName: Tests / - - pool: - vmImage: ${{ parameters.vmImage }} - strategy: - matrix: - "3.6": # lowest Python version - python.version: '3.6' - python.architecture: x64 - "3.8": - python.version: '3.8' - python.architecture: x64 - maxParallel: 2 - - steps: - - template: ../steps/run-tests.yml - -- job: Test_Secondary - displayName: Tests / - # Run after Test_Primary so we don't devour time and jobs if tests are going to fail - dependsOn: Test_Primary - - pool: - vmImage: ${{ parameters.vmImage }} - strategy: - matrix: - "3.7": - python.version: '3.7' - python.architecture: x64 - maxParallel: 4 - - steps: - - template: ../steps/run-tests.yml diff --git a/.azure-pipelines/linux.yml b/.azure-pipelines/linux.yml deleted file mode 100644 index e55980743..000000000 --- a/.azure-pipelines/linux.yml +++ /dev/null @@ -1,11 +0,0 @@ -variables: - CI: true - -jobs: -- template: jobs/test.yml - parameters: - vmImage: ubuntu-16.04 - -- template: jobs/package.yml - parameters: - vmImage: 
ubuntu-16.04 diff --git a/.azure-pipelines/steps/run-tests-windows.yml b/.azure-pipelines/steps/run-tests-windows.yml deleted file mode 100644 index 39282a3cc..000000000 --- a/.azure-pipelines/steps/run-tests-windows.yml +++ /dev/null @@ -1,54 +0,0 @@ -parameters: - runIntegrationTests: - -steps: -- task: UsePythonVersion@0 - displayName: Use Python $(python.version) - inputs: - versionSpec: '$(python.version)' - architecture: '$(python.architecture)' - -- task: PowerShell@2 - inputs: - filePath: .azure-pipelines/scripts/New-RAMDisk.ps1 - arguments: "-Drive R -Size 1GB" - displayName: Setup RAMDisk - -- powershell: | - mkdir R:\Temp - $acl = Get-Acl "R:\Temp" - $rule = New-Object System.Security.AccessControl.FileSystemAccessRule( - "Everyone", "FullControl", "ContainerInherit,ObjectInherit", "None", "Allow" - ) - $acl.AddAccessRule($rule) - Set-Acl "R:\Temp" $acl - displayName: Set RAMDisk Permissions - -- bash: pip install --upgrade 'virtualenv<20' setuptools tox - displayName: Install Tox - -- script: tox -e py -- -m unit -n auto --junit-xml=junit/unit-test.xml - env: - TEMP: "R:\\Temp" - displayName: Tox run unit tests - -- ${{ if eq(parameters.runIntegrationTests, 'true') }}: - - powershell: | - # Fix Git SSL errors - pip install certifi tox - python -m certifi > cacert.txt - $env:GIT_SSL_CAINFO = $(Get-Content cacert.txt) - - # Shorten paths to get under MAX_PATH or else integration tests will fail - # https://bugs.python.org/issue18199 - $env:TEMP = "R:\Temp" - - tox -e py -- -m integration -n auto --durations=5 --junit-xml=junit/integration-test.xml - displayName: Tox run integration tests - -- task: PublishTestResults@2 - displayName: Publish Test Results - inputs: - testResultsFiles: junit/*.xml - testRunTitle: 'Python $(python.version)' - condition: succeededOrFailed() diff --git a/.azure-pipelines/steps/run-tests.yml b/.azure-pipelines/steps/run-tests.yml deleted file mode 100644 index 5b9a9c50c..000000000 --- a/.azure-pipelines/steps/run-tests.yml +++ /dev/null @@ -1,25 +0,0 @@ -steps: -- task: UsePythonVersion@0 - displayName: Use Python $(python.version) - inputs: - versionSpec: '$(python.version)' - -- bash: pip install --upgrade 'virtualenv<20' setuptools tox - displayName: Install Tox - -- script: tox -e py -- -m unit -n auto --junit-xml=junit/unit-test.xml - displayName: Tox run unit tests - -# Run integration tests in two groups so we will fail faster if there is a failure in the first group -- script: tox -e py -- -m integration -n auto --durations=5 -k "not test_install" --junit-xml=junit/integration-test-group0.xml - displayName: Tox run Group 0 integration tests - -- script: tox -e py -- -m integration -n auto --durations=5 -k "test_install" --junit-xml=junit/integration-test-group1.xml - displayName: Tox run Group 1 integration tests - -- task: PublishTestResults@2 - displayName: Publish Test Results - inputs: - testResultsFiles: junit/*.xml - testRunTitle: 'Python $(python.version)' - condition: succeededOrFailed() diff --git a/.azure-pipelines/windows.yml b/.azure-pipelines/windows.yml deleted file mode 100644 index f56b8f504..000000000 --- a/.azure-pipelines/windows.yml +++ /dev/null @@ -1,11 +0,0 @@ -variables: - CI: true - -jobs: -- template: jobs/test-windows.yml - parameters: - vmImage: vs2017-win2016 - -- template: jobs/package.yml - parameters: - vmImage: vs2017-win2016 diff --git a/.gitattributes b/.gitattributes index 7b547a58c..6a0fc6943 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,4 +1,4 @@ # Patches must have Unix-style line endings, even on 
Windows -tools/automation/vendoring/patches/* eol=lf +tools/vendoring/patches/* eol=lf # The CA Bundle should always use Unix-style line endings, even on Windows src/pip/_vendor/certifi/*.pem eol=lf diff --git a/.github/ISSUE_TEMPLATE/bug-report.yml b/.github/ISSUE_TEMPLATE/bug-report.yml index 6b18de7bd..f9f4a73c2 100644 --- a/.github/ISSUE_TEMPLATE/bug-report.yml +++ b/.github/ISSUE_TEMPLATE/bug-report.yml @@ -1,81 +1,62 @@ ---- name: Bug report -about: Something is not working correctly. +description: Something is not working correctly. title: "" labels: "S: needs triage, type: bug" -issue_body: true # default: true, adds a classic WSYWIG textarea, if on + body: -- type: markdown - attributes: - value: | - ⚠ - If you're reporting an issue for `--use-feature=2020-resolver`, - use the "Dependency resolver failures / errors" template instead. -- type: markdown - attributes: - value: "**Environment**" -- type: input - attributes: - label: pip version - validations: - required: true -- type: input - attributes: - label: Python version - validations: - required: true -- type: input - attributes: - label: OS - validations: - required: true -- type: textarea - attributes: - label: Additional information - description: >- - Feel free to add more information about your environment here. - -- type: textarea - attributes: - label: Description - description: >- - A clear and concise description of what the bug is. - -- type: textarea - attributes: - label: Expected behavior - description: >- - A clear and concise description of what you expected to happen. - -- type: textarea - attributes: - label: How to Reproduce - description: >- - Describe the steps to reproduce this bug. - value: | - 1. Get package from '...' - 2. Then run '...' - 3. An error occurs. - -- type: textarea - attributes: - label: Output - description: >- - Paste the output of the steps above, including the commands - themselves and pip's output/traceback etc. - value: | - ```console - - ``` - -- type: checkboxes - attributes: - label: Code of Conduct - description: | - Read the [PSF Code of Conduct][CoC] first. - - [CoC]: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md - options: - - label: I agree to follow the PSF Code of Conduct + - type: textarea + attributes: + label: Description + description: >- + A clear and concise description of what the bug is. + validations: required: true -... + + - type: textarea + attributes: + label: Expected behavior + description: >- + A clear and concise description of what you expected to happen. + + - type: input + attributes: + label: pip version + validations: + required: true + - type: input + attributes: + label: Python version + validations: + required: true + - type: input + attributes: + label: OS + validations: + required: true + + - type: textarea + attributes: + label: How to Reproduce + description: Please provide steps to reproduce this bug. + value: | + 1. Get package from '...' + 2. Then run '...' + 3. An error occurs. + validations: + required: true + + - type: textarea + attributes: + label: Output + description: >- + Provide the output of the steps above, including the commands + themselves and pip's output/traceback etc. + render: sh-session + + - type: checkboxes + attributes: + label: Code of Conduct + options: + - label: >- + I agree to follow the [PSF Code of Conduct](https://www.python.org/psf/conduct/). 
+ required: true diff --git a/.github/ISSUE_TEMPLATE/resolver-failure.md b/.github/ISSUE_TEMPLATE/resolver-failure.md deleted file mode 100644 index b5215cef9..000000000 --- a/.github/ISSUE_TEMPLATE/resolver-failure.md +++ /dev/null @@ -1,34 +0,0 @@ ---- -name: Dependency resolver failures / errors -about: Report when the pip dependency resolver fails -labels: ["K: UX", "K: crash", "C: new resolver", "C: dependency resolution"] ---- - - - -**What did you want to do?** - - -**Output** - -``` -Paste what pip outputted in a code block. https://github.github.com/gfm/#fenced-code-blocks -``` - -**Additional information** - - diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 000000000..420bbfab2 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,187 @@ +name: CI + +on: + push: + branches: [master] + tags: + # Tags for all potential release numbers till 2030. + - "2[0-9].[0-3]" # 20.0 -> 29.3 + - "2[0-9].[0-3].[0-9]+" # 20.0.0 -> 29.3.[0-9]+ + pull_request: + schedule: + - cron: 0 0 * * MON # Run every Monday at 00:00 UTC + +jobs: + determine-changes: + runs-on: ubuntu-latest + outputs: + tests: ${{ steps.filter.outputs.tests }} + vendoring: ${{ steps.filter.outputs.vendoring }} + steps: + # For pull requests it's not necessary to checkout the code + - uses: dorny/paths-filter@v2 + id: filter + with: + filters: | + vendoring: + # Anything that's touching "vendored code" + - "src/pip/_vendor/**" + - "pyproject.toml" + tests: + # Anything that's touching testable stuff + - ".github/workflows/ci.yml" + - "tools/requirements/tests.txt" + - "src/**" + - "tests/**" + + pre-commit: + name: pre-commit + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + - uses: pre-commit/action@v2.0.0 + with: + extra_args: --hook-stage=manual + + packaging: + name: packaging + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + - name: Set up git credentials + run: | + git config --global user.email "pypa-dev@googlegroups.com" + git config --global user.name "pip" + + - run: pip install nox + - run: nox -s prepare-release -- 99.9 + - run: nox -s build-release -- 99.9 + + vendoring: + name: vendoring + runs-on: ubuntu-latest + + needs: [determine-changes] + if: ${{ needs.determine-changes.outputs.vendoring == 'true' }} + + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + + - run: pip install vendoring + - run: vendoring sync . 
--verbose + - run: git diff --exit-code + + tests-unix: + name: tests / ${{ matrix.python }} / ${{ matrix.os }} + runs-on: ${{ matrix.os }}-latest + + needs: [pre-commit, packaging, determine-changes] + if: ${{ needs.determine-changes.outputs.tests == 'true' }} + + strategy: + fail-fast: true + matrix: + os: [Ubuntu, MacOS] + python: + - 3.6 + - 3.7 + - 3.8 + - 3.9 + + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python }} + + - run: pip install tox 'virtualenv<20' + + # Main check + - name: Run unit tests + run: >- + tox -e py -- + -m unit + --verbose --numprocesses auto --showlocals + - name: Run integration tests + run: >- + tox -e py -- + -m integration + --verbose --numprocesses auto --showlocals + --durations=5 + + tests-windows: + name: tests / ${{ matrix.python }} / ${{ matrix.os }} / ${{ matrix.group }} + runs-on: ${{ matrix.os }}-latest + + needs: [pre-commit, packaging, determine-changes] + if: ${{ needs.determine-changes.outputs.tests == 'true' }} + + strategy: + fail-fast: true + matrix: + os: [Windows] + python: + - 3.6 + # Commented out, since Windows tests are expensively slow. + # - 3.7 + # - 3.8 + - 3.9 + group: [1, 2] + + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python }} + + # We use a RAMDisk on Windows, since filesystem IO is a big slowdown + # for our tests. + - name: Create a RAMDisk + run: ./tools/ci/New-RAMDisk.ps1 -Drive R -Size 1GB + + - name: Setup RAMDisk permissions + run: | + mkdir R:\Temp + $acl = Get-Acl "R:\Temp" + $rule = New-Object System.Security.AccessControl.FileSystemAccessRule( + "Everyone", "FullControl", "ContainerInherit,ObjectInherit", "None", "Allow" + ) + $acl.AddAccessRule($rule) + Set-Acl "R:\Temp" $acl + + - run: pip install tox 'virtualenv<20' + env: + TEMP: "R:\\Temp" + + # Main check + - name: Run unit tests + if: matrix.group == 1 + run: >- + tox -e py -- + -m unit + --verbose --numprocesses auto --showlocals + env: + TEMP: "R:\\Temp" + + - name: Run integration tests (group 1) + if: matrix.group == 1 + run: >- + tox -e py -- + -m integration -k "not test_install" + --verbose --numprocesses auto --showlocals + env: + TEMP: "R:\\Temp" + + - name: Run integration tests (group 2) + if: matrix.group == 2 + run: >- + tox -e py -- + -m integration -k "test_install" + --verbose --numprocesses auto --showlocals + env: + TEMP: "R:\\Temp" diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml deleted file mode 100644 index 71459d660..000000000 --- a/.github/workflows/linting.yml +++ /dev/null @@ -1,53 +0,0 @@ -name: Linting - -on: - push: - pull_request: - schedule: - # Run every Friday at 18:02 UTC - - cron: 2 18 * * 5 - -jobs: - lint: - name: ${{ matrix.os }} - runs-on: ${{ matrix.os }}-latest - env: - TOXENV: lint,docs,vendoring - - strategy: - matrix: - os: - - Ubuntu - - Windows - - steps: - - uses: actions/checkout@v2 - - name: Set up Python 3.9 - uses: actions/setup-python@v2 - with: - python-version: 3.9 - - # Setup Caching - - name: pip cache - uses: actions/cache@v1 - with: - path: ~/.cache/pip - key: ${{ runner.os }}-pip-${{ hashFiles('tools/requirements/tests.txt') }}-${{ hashFiles('tools/requirements/docs.txt') }}-${{ hashFiles('tox.ini') }} - restore-keys: | - ${{ runner.os }}-pip- - ${{ runner.os }}- - - - name: Set PY (for pre-commit cache) - run: echo "PY=$(python -c 'import hashlib, sys;print(hashlib.sha256(sys.version.encode()+sys.executable.encode()).hexdigest())')" >> 
$GITHUB_ENV - - name: pre-commit cache - uses: actions/cache@v1 - with: - path: ~/.cache/pre-commit - key: pre-commit|2020-02-14|${{ env.PY }}|${{ hashFiles('.pre-commit-config.yaml') }} - - # Get the latest tox - - name: Install tox - run: python -m pip install tox - - # Main check - - run: python -m tox diff --git a/.github/workflows/macos.yml b/.github/workflows/macos.yml deleted file mode 100644 index de226389d..000000000 --- a/.github/workflows/macos.yml +++ /dev/null @@ -1,127 +0,0 @@ -name: MacOS - -on: - push: - pull_request: - schedule: - # Run every Friday at 18:02 UTC - - cron: 2 18 * * 5 - -jobs: - dev-tools: - name: Quality Check - runs-on: macos-latest - - steps: - # Caches - - name: pip cache - uses: actions/cache@v1 - with: - path: ~/.cache/pip - key: ${{ runner.os }}-pip-${{ hashFiles('tools/requirements/tests.txt') }}-${{ hashFiles('tools/requirements/docs.txt') }}-${{ hashFiles('tox.ini') }} - restore-keys: | - ${{ runner.os }}-pip- - ${{ runner.os }}- - - name: Set PY (for pre-commit cache) - run: echo "PY=$(python -c 'import hashlib, sys;print(hashlib.sha256(sys.version.encode()+sys.executable.encode()).hexdigest())')" >> $GITHUB_ENV - - name: pre-commit cache - uses: actions/cache@v1 - with: - path: ~/.cache/pre-commit - key: pre-commit|2020-02-14|${{ env.PY }}|${{ hashFiles('.pre-commit-config.yaml') }} - - # Setup - - uses: actions/checkout@v2 - - name: Set up Python 3.8 - uses: actions/setup-python@v2 - with: - python-version: 3.8 - - - name: Install tox - run: python -m pip install tox - - # Main check - - run: python -m tox -e "lint,docs" - - packaging: - name: Packaging - runs-on: macos-latest - - steps: - # Caches - - name: pip cache - uses: actions/cache@v1 - with: - path: ~/.cache/pip - key: ${{ runner.os }}-pip-${{ hashFiles('tools/requirements/tests.txt') }}-${{ hashFiles('tools/requirements/docs.txt') }}-${{ hashFiles('tox.ini') }} - restore-keys: | - ${{ runner.os }}-pip- - ${{ runner.os }}- - # Setup - - name: Set up git credentials - run: | - git config --global user.email "pypa-dev@googlegroups.com" - git config --global user.name "pip" - - uses: actions/checkout@v2 - - name: Set up Python 3.8 - uses: actions/setup-python@v2 - with: - python-version: 3.8 - - name: Install tox and nox - run: python -m pip install tox nox - - # Main check - - name: Check vendored packages - run: python -m tox -e "vendoring" - - - name: Prepare dummy release - run: nox -s prepare-release -- 99.9 - - - name: Generate distributions for the dummy release - run: nox -s build-release -- 99.9 - - tests: - name: Tests / ${{ matrix.python }} - runs-on: macos-latest - - needs: dev-tools - - strategy: - fail-fast: false - matrix: - python: [3.6, 3.7, 3.8, 3.9] - - steps: - # Caches - - name: pip cache - uses: actions/cache@v1 - with: - path: ~/.cache/pip - key: ${{ runner.os }}-pip-${{ hashFiles('tools/requirements/tests.txt') }}-${{ hashFiles('tools/requirements/docs.txt') }}-${{ hashFiles('tox.ini') }} - restore-keys: | - ${{ runner.os }}-pip- - ${{ runner.os }}- - # Setup - - uses: actions/checkout@v2 - - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python }} - - - name: Install tox - run: python -m pip install tox 'virtualenv<20' - - # Main check - - name: Run unit tests - run: >- - python -m tox -e py -- - -m unit - --verbose - --numprocesses auto - - - name: Run integration tests - run: >- - python -m tox -e py -- - -m integration - --verbose - --numprocesses auto - --durations=5 diff --git a/.gitignore b/.gitignore index dc6244855..da9a31ab5 100644 --- 
a/.gitignore +++ b/.gitignore @@ -48,3 +48,6 @@ tests/data/common_wheels/ # Mac .DS_Store + +# Profiling related artifacts +*.prof diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7b06692b2..a343d1a99 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -22,36 +22,23 @@ repos: - id: black exclude: | (?x) - ^docs/| - ^src/pip/_internal/cli| ^src/pip/_internal/commands| - ^src/pip/_internal/distributions| ^src/pip/_internal/index| ^src/pip/_internal/models| ^src/pip/_internal/network| ^src/pip/_internal/operations| ^src/pip/_internal/req| - ^src/pip/_internal/resolution| - ^src/pip/_internal/utils| ^src/pip/_internal/vcs| ^src/pip/_internal/\w+\.py$| - ^src/pip/__main__.py$| ^tools/| # Tests - ^tests/conftest.py| - ^tests/yaml| - ^tests/lib| ^tests/data| ^tests/unit| ^tests/functional/(?!test_install)| ^tests/functional/test_install| - # Files in the root of the repository - ^setup.py| - ^noxfile.py| # A blank ignore, to avoid merge conflicts later. ^$ - - repo: https://gitlab.com/pycqa/flake8 rev: 3.8.4 hooks: @@ -72,7 +59,7 @@ repos: rev: v0.800 hooks: - id: mypy - exclude: docs|tests + exclude: tests args: ["--pretty"] additional_dependencies: ['nox==2020.12.31'] diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 6610b6eb0..000000000 --- a/.travis.yml +++ /dev/null @@ -1,32 +0,0 @@ -language: python -cache: pip -dist: xenial -python: 3.9 -addons: - apt: - packages: - - bzr - -stages: -- primary -- secondary - -jobs: - include: - # Basic Checks - - stage: primary - env: TOXENV=docs - - env: TOXENV=lint - - env: TOXENV=vendoring - - # Complete checking for ensuring compatibility - # PyPy - - stage: secondary - env: GROUP=1 - python: pypy3.6-7.3.1 - - env: GROUP=2 - python: pypy3.6-7.3.1 - -before_install: tools/travis/setup.sh -install: travis_retry tools/travis/install.sh -script: tools/travis/run.sh diff --git a/MANIFEST.in b/MANIFEST.in index 24d455378..9148af0b6 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -22,7 +22,7 @@ exclude noxfile.py recursive-include src/pip/_vendor *.pem recursive-include src/pip/_vendor py.typed -recursive-include docs *.css *.rst *.py +recursive-include docs *.css *.py *.rst *.md exclude src/pip/_vendor/six exclude src/pip/_vendor/six/moves diff --git a/docs/docs_feedback_sphinxext.py b/docs/docs_feedback_sphinxext.py deleted file mode 100644 index d0ff1f03d..000000000 --- a/docs/docs_feedback_sphinxext.py +++ /dev/null @@ -1,160 +0,0 @@ -"""A sphinx extension for collecting per doc feedback.""" - -from __future__ import annotations - -from itertools import chain -from typing import Dict, List, Union - -from sphinx.application import Sphinx - -DEFAULT_DOC_LINES_THRESHOLD = 250 -RST_INDENT = 4 -EMAIL_INDENT = 6 - - -def _modify_rst_document_source_on_read( - app: Sphinx, - docname: str, - source: List[str], -) -> None: - """Add info block to top and bottom of each document source. - - This function modifies RST source in-place by adding an admonition - block at the top and the bottom of each document right after it's - been read from disk preserving :orphan: at top, if present. 
- """ - admonition_type = app.config.docs_feedback_admonition_type - big_doc_lines = app.config.docs_feedback_big_doc_lines - escaped_email = app.config.docs_feedback_email.replace(' ', r'\ ') - excluded_documents = set(app.config.docs_feedback_excluded_documents) - questions_list = app.config.docs_feedback_questions_list - - valid_admonitions = { - 'attention', 'caution', 'danger', 'error', 'hint', - 'important', 'note', 'tip', 'warning', 'admonition', - } - - if admonition_type not in valid_admonitions: - raise ValueError( - 'Expected `docs_feedback_admonition_type` to be one of ' - f'{valid_admonitions} but got {admonition_type}.' - ) - - if not questions_list: - raise ValueError( - 'Expected `docs_feedback_questions_list` to list questions ' - 'but got none.' - ) - - if docname in excluded_documents: - # NOTE: Completely ignore any document - # NOTE: listed in 'docs_feedback_excluded_documents'. - return - - is_doc_big = source[0].count('\n') >= big_doc_lines - - questions_list_rst = '\n'.join( - f'{" " * RST_INDENT}{number!s}. {question}' - for number, question in enumerate(questions_list, 1) - ) - questions_list_urlencoded = ( - '\n'.join( - f'\n{" " * RST_INDENT}{number!s}. {question} ' - for number, question in enumerate( - chain( - (f'Document: {docname}. Page URL: https://', ), - questions_list, - ), - ) - ). - rstrip('\r\n\t '). - replace('\r', '%0D'). - replace('\n', '%0A'). - replace(' ', '%20') - ) - - admonition_msg = rf""" - **Did this article help?** - - We are currently doing research to improve pip's documentation - and would love your feedback. - Please `email us`_ and let us know{{let_us_know_ending}} - -{{questions_list_rst}} - - .. _email us: - mailto:{escaped_email}\ - ?subject=[Doc:\ {docname}]\ Pip\ docs\ feedback\ \ - (URL\:\ https\://)\ - &body={questions_list_urlencoded} - """ - let_us_know_ending = ':' - - info_block_bottom = ( - f'.. {admonition_type}::\n\t\t{admonition_msg.format_map(locals())}\n' - ) - - questions_list_rst = '' - let_us_know_ending = ( - ' why you came to this page and what on it helped ' - 'you and what did not. ' - '(:issue:`Read more about this research <8517>`)' - ) - info_block_top = '' if is_doc_big else ( - f'.. {admonition_type}::\n\t\t{admonition_msg.format_map(locals())}\n' - ) - - orphan_mark = ':orphan:' - is_orphan = orphan_mark in source[0] - if is_orphan: - source[0] = source[0].replace(orphan_mark, '') - else: - orphan_mark = '' - - source[0] = '\n\n'.join(( - orphan_mark, info_block_top, source[0], info_block_bottom, - )) - - -def setup(app: Sphinx) -> Dict[str, Union[bool, str]]: - """Initialize the Sphinx extension. - - This function adds a callback for modifying the document sources - in-place on read. - - It also declares the extension settings changable via :file:`conf.py`. 
- """ - rebuild_trigger = 'html' # rebuild full html on settings change - app.add_config_value( - 'docs_feedback_admonition_type', - default='important', - rebuild=rebuild_trigger, - ) - app.add_config_value( - 'docs_feedback_big_doc_lines', - default=DEFAULT_DOC_LINES_THRESHOLD, - rebuild=rebuild_trigger, - ) - app.add_config_value( - 'docs_feedback_email', - default='Docs UX Team ', - rebuild=rebuild_trigger, - ) - app.add_config_value( - 'docs_feedback_excluded_documents', - default=set(), - rebuild=rebuild_trigger, - ) - app.add_config_value( - 'docs_feedback_questions_list', - default=(), - rebuild=rebuild_trigger, - ) - - app.connect('source-read', _modify_rst_document_source_on_read) - - return { - 'parallel_read_safe': True, - 'parallel_write_safe': True, - 'version': 'builtin', - } diff --git a/docs/html/cli/index.md b/docs/html/cli/index.md new file mode 100644 index 000000000..f608da521 --- /dev/null +++ b/docs/html/cli/index.md @@ -0,0 +1,48 @@ +# Commands + +The general options that apply to all the commands listed below can be +found [under the `pip` page in this section](pip). + +```{toctree} +:maxdepth: 1 +:hidden: + +pip +``` + +```{toctree} +:maxdepth: 1 +:caption: Environment Management and Introspection + +pip_install +pip_uninstall +pip_list +pip_freeze +pip_check +``` + +```{toctree} +:maxdepth: 1 +:caption: Handling Distribution Files + +pip_download +pip_wheel +pip_hash +``` + +```{toctree} +:maxdepth: 1 +:caption: Package Index information + +pip_show +pip_search +``` + +```{toctree} +:maxdepth: 1 +:caption: Managing pip itself + +pip_cache +pip_config +pip_debug +``` diff --git a/docs/html/cli/pip.rst b/docs/html/cli/pip.rst new file mode 100644 index 000000000..1f52630f6 --- /dev/null +++ b/docs/html/cli/pip.rst @@ -0,0 +1,255 @@ +=== +pip +=== + + +Usage +***** + +.. tab:: Unix/macOS + + .. code-block:: shell + + python -m pip [options] + +.. tab:: Windows + + .. code-block:: shell + + py -m pip [options] + +Description +*********** + + +.. _`Logging`: + + +Logging +======= + +Console logging +~~~~~~~~~~~~~~~ + +pip offers :ref:`-v, --verbose <--verbose>` and :ref:`-q, --quiet <--quiet>` +to control the console log level. By default, some messages (error and warnings) +are colored in the terminal. If you want to suppress the colored output use +:ref:`--no-color <--no-color>`. + + +.. _`FileLogging`: + +File logging +~~~~~~~~~~~~ + +pip offers the :ref:`--log <--log>` option for specifying a file where a maximum +verbosity log will be kept. This option is empty by default. This log appends +to previous logging. + +Like all pip options, ``--log`` can also be set as an environment variable, or +placed into the pip config file. See the :ref:`Configuration` section. + +.. _`exists-action`: + +--exists-action option +====================== + +This option specifies default behavior when path already exists. +Possible cases: downloading files or checking out repositories for installation, +creating archives. If ``--exists-action`` is not defined, pip will prompt +when decision is needed. + +*(s)witch* + Only relevant to VCS checkout. Attempt to switch the checkout + to the appropriate URL and/or revision. +*(i)gnore* + Abort current operation (e.g. don't copy file, don't create archive, + don't modify a checkout). +*(w)ipe* + Delete the file or VCS checkout before trying to create, download, or checkout a new one. 
+*(b)ackup* + Rename the file or checkout to ``{name}{'.bak' * n}``, where n is some number + of ``.bak`` extensions, such that the file didn't exist at some point. + So the most recent backup will be the one with the largest number after ``.bak``. +*(a)abort* + Abort pip and return non-zero exit status. + +.. _`build-interface`: + + +Build System Interface +====================== + +pip builds packages by invoking the build system. By default, builds will use +``setuptools``, but if a project specifies a different build system using a +``pyproject.toml`` file, as per :pep:`517`, pip will use that instead. As well +as package building, the build system is also invoked to install packages +direct from source. This is handled by invoking the build system to build a +wheel, and then installing from that wheel. The built wheel is cached locally +by pip to avoid repeated identical builds. + +The current interface to the build system is via the ``setup.py`` command line +script - all build actions are defined in terms of the specific ``setup.py`` +command line that will be run to invoke the required action. + +Setuptools Injection +~~~~~~~~~~~~~~~~~~~~ + +When :pep:`517` is not used, the supported build system is ``setuptools``. +However, not all packages use ``setuptools`` in their build scripts. To support +projects that use "pure ``distutils``", pip injects ``setuptools`` into +``sys.modules`` before invoking ``setup.py``. The injection should be +transparent to ``distutils``-based projects, but 3rd party build tools wishing +to provide a ``setup.py`` emulating the commands pip requires may need to be +aware that it takes place. + +Projects using :pep:`517` *must* explicitly use setuptools - pip does not do +the above injection process in this case. + +Build System Output +~~~~~~~~~~~~~~~~~~~ + +Any output produced by the build system will be read by pip (for display to the +user if requested). In order to correctly read the build system output, pip +requires that the output is written in a well-defined encoding, specifically +the encoding the user has configured for text output (which can be obtained in +Python using ``locale.getpreferredencoding``). If the configured encoding is +ASCII, pip assumes UTF-8 (to account for the behaviour of some Unix systems). + +Build systems should ensure that any tools they invoke (compilers, etc) produce +output in the correct encoding. In practice - and in particular on Windows, +where tools are inconsistent in their use of the "OEM" and "ANSI" codepages - +this may not always be possible. pip will therefore attempt to recover cleanly +if presented with incorrectly encoded build tool output, by translating +unexpected byte sequences to Python-style hexadecimal escape sequences +(``"\x80\xff"``, etc). However, it is still possible for output to be displayed +using an incorrect encoding (mojibake). + +Under :pep:`517`, handling of build tool output is the backend's responsibility, +and pip simply displays the output produced by the backend. (Backends, however, +will likely still have to address the issues described above). + +PEP 517 and 518 Support +~~~~~~~~~~~~~~~~~~~~~~~ + +As of version 10.0, pip supports projects declaring dependencies that are +required at install time using a ``pyproject.toml`` file, in the form described +in :pep:`518`. When building a project, pip will install the required +dependencies locally, and make them available to the build process. 
+Furthermore, from version 19.0 onwards, pip supports projects specifying the +build backend they use in ``pyproject.toml``, in the form described in +:pep:`517`. + +When making build requirements available, pip does so in an *isolated +environment*. That is, pip does not install those requirements into the user's +``site-packages``, but rather installs them in a temporary directory which it +adds to the user's ``sys.path`` for the duration of the build. This ensures +that build requirements are handled independently of the user's runtime +environment. For example, a project that needs a recent version of setuptools +to build can still be installed, even if the user has an older version +installed (and without silently replacing that version). + +In certain cases, projects (or redistributors) may have workflows that +explicitly manage the build environment. For such workflows, build isolation +can be problematic. If this is the case, pip provides a +``--no-build-isolation`` flag to disable build isolation. Users supplying this +flag are responsible for ensuring the build environment is managed +appropriately (including ensuring that all required build dependencies are +installed). + +By default, pip will continue to use the legacy (direct ``setup.py`` execution +based) build processing for projects that do not have a ``pyproject.toml`` file. +Projects with a ``pyproject.toml`` file will use a :pep:`517` backend. Projects +with a ``pyproject.toml`` file, but which don't have a ``build-system`` section, +will be assumed to have the following backend settings:: + + [build-system] + requires = ["setuptools>=40.8.0", "wheel"] + build-backend = "setuptools.build_meta:__legacy__" + +.. note:: + + ``setuptools`` 40.8.0 is the first version of setuptools that offers a + :pep:`517` backend that closely mimics directly executing ``setup.py``. + +If a project has ``[build-system]``, but no ``build-backend``, pip will also use +``setuptools.build_meta:__legacy__``, but will expect the project requirements +to include ``setuptools`` and ``wheel`` (and will report an error if the +installed version of ``setuptools`` is not recent enough). + +If a user wants to explicitly request :pep:`517` handling even though a project +doesn't have a ``pyproject.toml`` file, this can be done using the +``--use-pep517`` command line option. Similarly, to request legacy processing +even though ``pyproject.toml`` is present, the ``--no-use-pep517`` option is +available (although obviously it is an error to choose ``--no-use-pep517`` if +the project has no ``setup.py``, or explicitly requests a build backend). As +with other command line flags, pip recognises the ``PIP_USE_PEP517`` +environment veriable and a ``use-pep517`` config file option (set to true or +false) to set this option globally. Note that overriding pip's choice of +whether to use :pep:`517` processing in this way does *not* affect whether pip +will use an isolated build environment (which is controlled via +``--no-build-isolation`` as noted above). + +Except in the case noted above (projects with no :pep:`518` ``[build-system]`` +section in ``pyproject.toml``), pip will never implicitly install a build +system. Projects **must** ensure that the correct build system is listed in +their ``requires`` list (this applies even if pip assumes that the +``setuptools`` backend is being used, as noted above). + +.. 
_pep-518-limitations: + +**Historical Limitations**: + +* ``pip<18.0``: only supports installing build requirements from wheels, and + does not support the use of environment markers and extras (only version + specifiers are respected). + +* ``pip<18.1``: build dependencies using .pth files are not properly supported; + as a result namespace packages do not work under Python 3.2 and earlier. + +Future Developments +~~~~~~~~~~~~~~~~~~~ + +:pep:`426` notes that the intention is to add hooks to project metadata in +version 2.1 of the metadata spec, to explicitly define how to build a project +from its source. Once this version of the metadata spec is final, pip will +migrate to using that interface. At that point, the ``setup.py`` interface +documented here will be retained solely for legacy purposes, until projects +have migrated. + +Specifically, applications should *not* expect to rely on there being any form +of backward compatibility guarantees around the ``setup.py`` interface. + + +Build Options +~~~~~~~~~~~~~ + +The ``--global-option`` and ``--build-option`` arguments to the ``pip install`` +and ``pip wheel`` inject additional arguments into the ``setup.py`` command +(``--build-option`` is only available in ``pip wheel``). These arguments are +included in the command as follows: + +.. tab:: Unix/macOS + + .. code-block:: console + + python setup.py BUILD COMMAND + +.. tab:: Windows + + .. code-block:: shell + + py setup.py BUILD COMMAND + +The options are passed unmodified, and presently offer direct access to the +distutils command line. Use of ``--global-option`` and ``--build-option`` +should be considered as build system dependent, and may not be supported in the +current form if support for alternative build systems is added to pip. + + +.. _`General Options`: + +General Options +*************** + +.. pip-general-options:: diff --git a/docs/html/cli/pip_cache.rst b/docs/html/cli/pip_cache.rst new file mode 100644 index 000000000..0a23c510d --- /dev/null +++ b/docs/html/cli/pip_cache.rst @@ -0,0 +1,27 @@ + +.. _`pip cache`: + +pip cache +--------- + + +Usage +***** + +.. tab:: Unix/macOS + + .. pip-command-usage:: cache "python -m pip" + +.. tab:: Windows + + .. pip-command-usage:: cache "py -m pip" + +Description +*********** + +.. pip-command-description:: cache + +Options +******* + +.. pip-command-options:: cache diff --git a/docs/html/cli/pip_check.rst b/docs/html/cli/pip_check.rst new file mode 100644 index 000000000..268cf9a14 --- /dev/null +++ b/docs/html/cli/pip_check.rst @@ -0,0 +1,87 @@ +.. _`pip check`: + +========= +pip check +========= + + +Usage +===== + +.. tab:: Unix/macOS + + .. pip-command-usage:: check "python -m pip" + +.. tab:: Windows + + .. pip-command-usage:: check "py -m pip" + + +Description +=========== + +.. pip-command-description:: check + + +Examples +======== + +#. If all dependencies are compatible: + + .. tab:: Unix/macOS + + .. code-block:: console + + $ python -m pip check + No broken requirements found. + $ echo $? + 0 + + .. tab:: Windows + + .. code-block:: console + + C:\> py -m pip check + No broken requirements found. + C:\> echo %errorlevel% + 0 + +#. If a package is missing: + + .. tab:: Unix/macOS + + .. code-block:: console + + $ python -m pip check + pyramid 1.5.2 requires WebOb, which is not installed. + $ echo $? + 1 + + .. tab:: Windows + + .. code-block:: console + + C:\> py -m pip check + pyramid 1.5.2 requires WebOb, which is not installed. + C:\> echo %errorlevel% + 1 + +#. If a package has the wrong version: + + .. 
tab:: Unix/macOS + + .. code-block:: console + + $ python -m pip check + pyramid 1.5.2 has requirement WebOb>=1.3.1, but you have WebOb 0.8. + $ echo $? + 1 + + .. tab:: Windows + + .. code-block:: console + + C:\> py -m pip check + pyramid 1.5.2 has requirement WebOb>=1.3.1, but you have WebOb 0.8. + C:\> echo %errorlevel% + 1 diff --git a/docs/html/cli/pip_config.rst b/docs/html/cli/pip_config.rst new file mode 100644 index 000000000..8b2f84630 --- /dev/null +++ b/docs/html/cli/pip_config.rst @@ -0,0 +1,30 @@ + +.. _`pip config`: + +========== +pip config +========== + + +Usage +===== + +.. tab:: Unix/macOS + + .. pip-command-usage:: config "python -m pip" + +.. tab:: Windows + + .. pip-command-usage:: config "py -m pip" + + +Description +=========== + +.. pip-command-description:: config + + +Options +======= + +.. pip-command-options:: config diff --git a/docs/html/cli/pip_debug.rst b/docs/html/cli/pip_debug.rst new file mode 100644 index 000000000..4023533c9 --- /dev/null +++ b/docs/html/cli/pip_debug.rst @@ -0,0 +1,35 @@ +.. _`pip debug`: + +========= +pip debug +========= + + +Usage +===== + +.. tab:: Unix/macOS + + .. pip-command-usage:: debug "python -m pip" + +.. tab:: Windows + + .. pip-command-usage:: debug "py -m pip" + + +.. warning:: + + This command is only meant for debugging. + Its options and outputs are provisional and may change without notice. + + +Description +=========== + +.. pip-command-description:: debug + + +Options +======= + +.. pip-command-options:: debug diff --git a/docs/html/cli/pip_download.rst b/docs/html/cli/pip_download.rst new file mode 100644 index 000000000..4f15314d7 --- /dev/null +++ b/docs/html/cli/pip_download.rst @@ -0,0 +1,226 @@ + +.. _`pip download`: + +============ +pip download +============ + + +Usage +===== + +.. tab:: Unix/macOS + + .. pip-command-usage:: download "python -m pip" + +.. tab:: Windows + + .. pip-command-usage:: download "py -m pip" + + +Description +=========== + +.. pip-command-description:: download + +Overview +-------- + +``pip download`` does the same resolution and downloading as ``pip install``, +but instead of installing the dependencies, it collects the downloaded +distributions into the directory provided (defaulting to the current +directory). This directory can later be passed as the value to ``pip install +--find-links`` to facilitate offline or locked down package installation. + +``pip download`` with the ``--platform``, ``--python-version``, +``--implementation``, and ``--abi`` options provides the ability to fetch +dependencies for an interpreter and system other than the ones that pip is +running on. ``--only-binary=:all:`` or ``--no-deps`` is required when using any +of these options. It is important to note that these options all default to the +current system/interpreter, and not to the most restrictive constraints (e.g. +platform any, abi none, etc). To avoid fetching dependencies that happen to +match the constraint of the current interpreter (but not your target one), it +is recommended to specify all of these options if you are specifying one of +them. Generic dependencies (e.g. universal wheels, or dependencies with no +platform, abi, or implementation constraints) will still match an over- +constrained download requirement. + + + +Options +======= + +.. pip-command-options:: download + +.. pip-index-options:: download + + +Examples +======== + +#. Download a package and all of its dependencies + + .. tab:: Unix/macOS + + .. 
code-block:: shell + + python -m pip download SomePackage + python -m pip download -d . SomePackage # equivalent to above + python -m pip download --no-index --find-links=/tmp/wheelhouse -d /tmp/otherwheelhouse SomePackage + + .. tab:: Windows + + .. code-block:: shell + + py -m pip download SomePackage + py -m pip download -d . SomePackage # equivalent to above + py -m pip download --no-index --find-links=/tmp/wheelhouse -d /tmp/otherwheelhouse SomePackage + + +#. Download a package and all of its dependencies with OSX specific interpreter constraints. + This forces OSX 10.10 or lower compatibility. Since OSX deps are forward compatible, + this will also match ``macosx-10_9_x86_64``, ``macosx-10_8_x86_64``, ``macosx-10_8_intel``, + etc. + It will also match deps with platform ``any``. Also force the interpreter version to ``27`` + (or more generic, i.e. ``2``) and implementation to ``cp`` (or more generic, i.e. ``py``). + + .. tab:: Unix/macOS + + .. code-block:: shell + + python -m pip download \ + --only-binary=:all: \ + --platform macosx-10_10_x86_64 \ + --python-version 27 \ + --implementation cp \ + SomePackage + + .. tab:: Windows + + .. code-block:: shell + + py -m pip download ^ + --only-binary=:all: ^ + --platform macosx-10_10_x86_64 ^ + --python-version 27 ^ + --implementation cp ^ + SomePackage + +#. Download a package and its dependencies with linux specific constraints. + Force the interpreter to be any minor version of py3k, and only accept + ``cp34m`` or ``none`` as the abi. + + .. tab:: Unix/macOS + + .. code-block:: shell + + python -m pip download \ + --only-binary=:all: \ + --platform linux_x86_64 \ + --python-version 3 \ + --implementation cp \ + --abi cp34m \ + SomePackage + + .. tab:: Windows + + .. code-block:: shell + + py -m pip download ^ + --only-binary=:all: ^ + --platform linux_x86_64 ^ + --python-version 3 ^ + --implementation cp ^ + --abi cp34m ^ + SomePackage + +#. Force platform, implementation, and abi agnostic deps. + + .. tab:: Unix/macOS + + .. code-block:: shell + + python -m pip download \ + --only-binary=:all: \ + --platform any \ + --python-version 3 \ + --implementation py \ + --abi none \ + SomePackage + + .. tab:: Windows + + .. code-block:: shell + + py -m pip download ^ + --only-binary=:all: ^ + --platform any ^ + --python-version 3 ^ + --implementation py ^ + --abi none ^ + SomePackage + +#. Even when overconstrained, this will still correctly fetch the pip universal wheel. + + .. tab:: Unix/macOS + + .. code-block:: console + + $ python -m pip download \ + --only-binary=:all: \ + --platform linux_x86_64 \ + --python-version 33 \ + --implementation cp \ + --abi cp34m \ + pip>=8 + + .. code-block:: console + + $ ls pip-8.1.1-py2.py3-none-any.whl + pip-8.1.1-py2.py3-none-any.whl + + .. tab:: Windows + + .. code-block:: console + + C:\> py -m pip download ^ + --only-binary=:all: ^ + --platform linux_x86_64 ^ + --python-version 33 ^ + --implementation cp ^ + --abi cp34m ^ + pip>=8 + + .. code-block:: console + + C:\> dir pip-8.1.1-py2.py3-none-any.whl + pip-8.1.1-py2.py3-none-any.whl + +#. Download a package supporting one of several ABIs and platforms. + This is useful when fetching wheels for a well-defined interpreter, whose + supported ABIs and platforms are known and fixed, different than the one pip is + running under. + + .. tab:: Unix/macOS + + .. 
code-block:: console + + $ python -m pip download \ + --only-binary=:all: \ + --platform manylinux1_x86_64 --platform linux_x86_64 --platform any \ + --python-version 36 \ + --implementation cp \ + --abi cp36m --abi cp36 --abi abi3 --abi none \ + SomePackage + + .. tab:: Windows + + .. code-block:: console + + C:> py -m pip download ^ + --only-binary=:all: ^ + --platform manylinux1_x86_64 --platform linux_x86_64 --platform any ^ + --python-version 36 ^ + --implementation cp ^ + --abi cp36m --abi cp36 --abi abi3 --abi none ^ + SomePackage diff --git a/docs/html/cli/pip_freeze.rst b/docs/html/cli/pip_freeze.rst new file mode 100644 index 000000000..3533db793 --- /dev/null +++ b/docs/html/cli/pip_freeze.rst @@ -0,0 +1,92 @@ + +.. _`pip freeze`: + +========== +pip freeze +========== + + +Usage +===== + +.. tab:: Unix/macOS + + .. pip-command-usage:: freeze "python -m pip" + +.. tab:: Windows + + .. pip-command-usage:: freeze "py -m pip" + + +Description +=========== + +.. pip-command-description:: freeze + + +Options +======= + +.. pip-command-options:: freeze + + +Examples +======== + +#. Generate output suitable for a requirements file. + + .. tab:: Unix/macOS + + .. code-block:: console + + $ python -m pip freeze + docutils==0.11 + Jinja2==2.7.2 + MarkupSafe==0.19 + Pygments==1.6 + Sphinx==1.2.2 + + .. tab:: Windows + + .. code-block:: console + + C:\> py -m pip freeze + docutils==0.11 + Jinja2==2.7.2 + MarkupSafe==0.19 + Pygments==1.6 + Sphinx==1.2.2 + +#. Generate a requirements file and then install from it in another environment. + + .. tab:: Unix/macOS + + .. code-block:: shell + + env1/bin/python -m pip freeze > requirements.txt + env2/bin/python -m pip install -r requirements.txt + + .. tab:: Windows + + .. code-block:: shell + + env1\bin\python -m pip freeze > requirements.txt + env2\bin\python -m pip install -r requirements.txt + + +Fixing "Permission denied:" errors +================================== + +The purpose of this section of documentation is to provide practical +suggestions to users seeing a `"Permission denied" error `__ on ``pip freeze``. + +This error occurs, for instance, when the command is installed only for another +user, and the current user doesn't have the permission to execute the other +user's command. + +To solve that issue, you can try one of the followings: + +- Install the command for yourself (e.g. in your home directory). +- Ask the system admin to allow this command for all users. +- Check and correct the PATH variable of your own environment. +- Check the `ACL (Access-Control List) `_ for this command. diff --git a/docs/html/cli/pip_hash.rst b/docs/html/cli/pip_hash.rst new file mode 100644 index 000000000..7df0d5a4f --- /dev/null +++ b/docs/html/cli/pip_hash.rst @@ -0,0 +1,72 @@ +.. _`pip hash`: + +======== +pip hash +======== + + +Usage +===== + +.. tab:: Unix/macOS + + .. pip-command-usage:: hash "python -m pip" + +.. tab:: Windows + + .. pip-command-usage:: hash "py -m pip" + + +Description +=========== + +.. pip-command-description:: hash + +Overview +-------- + +``pip hash`` is a convenient way to get a hash digest for use with +:ref:`hash-checking mode`, especially for packages with multiple archives. The +error message from ``pip install --require-hashes ...`` will give you one +hash, but, if there are multiple archives (like source and binary ones), you +will need to manually download and compute a hash for the others. 
Otherwise, a +spurious hash mismatch could occur when :ref:`pip install` is passed a +different set of options, like :ref:`--no-binary `. + + +Options +======= + +.. pip-command-options:: hash + + +Example +======= + +Compute the hash of a downloaded archive: + +.. tab:: Unix/macOS + + .. code-block:: console + + $ python -m pip download SomePackage + Collecting SomePackage + Downloading SomePackage-2.2.tar.gz + Saved ./pip_downloads/SomePackage-2.2.tar.gz + Successfully downloaded SomePackage + $ python -m pip hash ./pip_downloads/SomePackage-2.2.tar.gz + ./pip_downloads/SomePackage-2.2.tar.gz: + --hash=sha256:93e62e05c7ad3da1a233def6731e8285156701e3419a5fe279017c429ec67ce0 + +.. tab:: Windows + + .. code-block:: console + + C:\> py -m pip download SomePackage + Collecting SomePackage + Downloading SomePackage-2.2.tar.gz + Saved ./pip_downloads/SomePackage-2.2.tar.gz + Successfully downloaded SomePackage + C:\> py -m pip hash ./pip_downloads/SomePackage-2.2.tar.gz + ./pip_downloads/SomePackage-2.2.tar.gz: + --hash=sha256:93e62e05c7ad3da1a233def6731e8285156701e3419a5fe279017c429ec67ce0 diff --git a/docs/html/cli/pip_install.rst b/docs/html/cli/pip_install.rst new file mode 100644 index 000000000..9ebb6e3f7 --- /dev/null +++ b/docs/html/cli/pip_install.rst @@ -0,0 +1,1227 @@ +.. _`pip install`: + +=========== +pip install +=========== + + + +Usage +===== + +.. tab:: Unix/macOS + + .. pip-command-usage:: install "python -m pip" + +.. tab:: Windows + + .. pip-command-usage:: install "py -m pip" + + + +Description +=========== + +.. pip-command-description:: install + +Overview +-------- + +pip install has several stages: + +1. Identify the base requirements. The user supplied arguments are processed + here. +2. Resolve dependencies. What will be installed is determined here. +3. Build wheels. All the dependencies that can be are built into wheels. +4. Install the packages (and uninstall anything being upgraded/replaced). + +Note that ``pip install`` prefers to leave the installed version as-is +unless ``--upgrade`` is specified. + +Argument Handling +----------------- + +When looking at the items to be installed, pip checks what type of item +each is, in the following order: + +1. Project or archive URL. +2. Local directory (which must contain a ``setup.py``, or pip will report + an error). +3. Local file (a sdist or wheel format archive, following the naming + conventions for those formats). +4. A requirement, as specified in :pep:`440`. + +Each item identified is added to the set of requirements to be satisfied by +the install. + +Working Out the Name and Version +-------------------------------- + +For each candidate item, pip needs to know the project name and version. For +wheels (identified by the ``.whl`` file extension) this can be obtained from +the filename, as per the Wheel spec. For local directories, or explicitly +specified sdist files, the ``setup.py egg_info`` command is used to determine +the project metadata. For sdists located via an index, the filename is parsed +for the name and project version (this is in theory slightly less reliable +than using the ``egg_info`` command, but avoids downloading and processing +unnecessary numbers of files). + +Any URL may use the ``#egg=name`` syntax (see :ref:`VCS Support`) to +explicitly state the project name. 
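+
+As a rough illustration of the naming rule above (this is not pip's own code;
+the helper name is invented for the example), the project name and version of a
+wheel can be read directly from its filename, whose dash-separated fields are
+fixed by the Wheel spec:
+
+.. code-block:: python
+
+   # Illustrative sketch: per the wheel filename convention
+   # ``{name}-{version}(-{build})?-{python}-{abi}-{platform}.whl``,
+   # the first two dash-separated fields are the project name and version.
+   def name_version_from_wheel(filename):
+       if not filename.endswith(".whl"):
+           raise ValueError("not a wheel filename")
+       name, version = filename[: -len(".whl")].split("-")[:2]
+       return name, version
+
+   print(name_version_from_wheel("SomePackage-2.2-py2.py3-none-any.whl"))
+   # -> ('SomePackage', '2.2')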
+ +Satisfying Requirements +----------------------- + +Once pip has the set of requirements to satisfy, it chooses which version of +each requirement to install using the simple rule that the latest version that +satisfies the given constraints will be installed (but see :ref:`here
+<Pre Release Versions>`
+for an exception regarding pre-release versions). Where more than one source of
+the chosen version is available, it is assumed that any source is acceptable
+(as otherwise the versions would differ).
+
+Installation Order
+------------------
+
+.. note::
+
+   This section is only about installation order of runtime dependencies, and
+   does not apply to build dependencies (those are specified using PEP 518).
+
+As of v6.1.0, pip installs dependencies before their dependents, i.e. in
+"topological order."  This is the only commitment pip currently makes related
+to order.  While it may be coincidentally true that pip will install things in
+the order of the install arguments or in the order of the items in a
+requirements file, this is not a promise.
+
+In the event of a dependency cycle (aka "circular dependency"), the current
+implementation (which might possibly change later) has it such that the first
+encountered member of the cycle is installed last.
+
+For instance, if quux depends on foo which depends on bar which depends on baz,
+which depends on foo:
+
+.. tab:: Unix/macOS
+
+   .. code-block:: console
+
+      $ python -m pip install quux
+      ...
+      Installing collected packages baz, bar, foo, quux
+
+      $ python -m pip install bar
+      ...
+      Installing collected packages foo, baz, bar
+
+.. tab:: Windows
+
+   .. code-block:: console
+
+      C:\> py -m pip install quux
+      ...
+      Installing collected packages baz, bar, foo, quux
+
+      C:\> py -m pip install bar
+      ...
+      Installing collected packages foo, baz, bar
+
+
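+The ordering rule can be pictured with a small sketch (illustrative only, not
+pip's resolver; the function and the dependency data are made up): a post-order
+walk over the dependency graph emits dependencies before dependents, and the
+first-encountered member of a cycle finishes, and is therefore installed, last:
+
+.. code-block:: python
+
+   def install_order(requested, dependencies):
+       """Return packages in the order they would be installed."""
+       order, seen = [], set()
+
+       def visit(name):
+           if name in seen:
+               return
+           seen.add(name)
+           for dep in dependencies.get(name, []):
+               visit(dep)
+           order.append(name)          # a package is emitted after its deps
+
+       for name in requested:
+           visit(name)
+       return order
+
+   deps = {"quux": ["foo"], "foo": ["bar"], "bar": ["baz"], "baz": ["foo"]}
+   print(install_order(["quux"], deps))   # ['baz', 'bar', 'foo', 'quux']
+   print(install_order(["bar"], deps))    # ['foo', 'baz', 'bar']
+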
+Prior to v6.1.0, pip made no commitments about install order.
+
+The decision to install topologically is based on the principle that
+installations should proceed in a way that leaves the environment usable at each
+step. This has two main practical benefits:
+
+1. Concurrent use of the environment during the install is more likely to work.
+2. A failed install is less likely to leave a broken environment.  Although pip
+   would like to support failure rollbacks eventually; in the meantime, this is
+   an improvement.
+
+Although the new install order is not intended to replace (and does not replace)
+the use of ``setup_requires`` to declare build dependencies, it may help certain
+projects that previously failed to install from sdist, provided they fit the
+following profile:
+
+1. They have build dependencies that are also declared as install dependencies
+   using ``install_requires``.
+2. ``python setup.py egg_info`` works without their build dependencies being
+   installed.
+3. For whatever reason, they don't or won't declare their build dependencies using
+   ``setup_requires``.
+
+
+.. _`Requirements File Format`:
+
+Requirements File Format
+------------------------
+
+Each line of the requirements file indicates something to be installed,
+and like arguments to :ref:`pip install`, the following forms are supported::
+
+    [[--option]...]
+    <requirement specifier> [; markers] [[--option]...]
+    <archive url/path>
+    [-e] <local project path>
+    [-e] <vcs project url>
+
+For details on requirement specifiers, see :ref:`Requirement Specifiers`.
+
+See the :ref:`pip install Examples` for examples of all these forms.
+
+A line that begins with ``#`` is treated as a comment and ignored. Whitespace
+followed by a ``#`` causes the ``#`` and the remainder of the line to be
+treated as a comment.
+
+A line ending in an unescaped ``\`` is treated as a line continuation
+and the newline following it is effectively ignored.
+
+Comments are stripped *after* line continuations are processed.
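+
+For illustration, the following two physical lines (``SomePackage`` is a
+placeholder) are joined into the single logical requirement
+``SomePackage >= 1.0`` before the trailing comment is stripped:
+
+.. code-block:: console
+
+   $ cat requirements.txt
+   SomePackage \
+       >= 1.0   # joined with the previous line, then this comment is removed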
+
+To interpret the requirements file in UTF-8 format, add a comment
+``# -*- coding: utf-8 -*-`` to the first or second line of the file.
+
+The following options are supported:
+
+.. pip-requirements-file-options-ref-list::
+
+Please note that the above options are global options, and should be specified on their individual lines.
+The options which can be applied to individual requirements are
+:ref:`--install-option `, :ref:`--global-option ` and ``--hash``.
+
+For example, to specify :ref:`--pre `, :ref:`--no-index ` and two
+:ref:`--find-links ` locations:
+
+::
+
+--pre
+--no-index
+--find-links /my/local/archives
+--find-links http://some.archives.com/archives
+
+
+If you wish, you can refer to other requirements files, like this::
+
+    -r more_requirements.txt
+
+You can also refer to :ref:`constraints files `, like this::
+
+    -c some_constraints.txt
+
+.. _`Using Environment Variables`:
+
+Using Environment Variables
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Since version 10, pip supports the use of environment variables inside the
+requirements file. You can now store sensitive data (tokens, keys, etc.) in
+environment variables and only specify the variable name for your requirements,
+letting pip look up the value at runtime. This approach aligns with the commonly
+used `12-factor configuration pattern `_.
+
+You have to use the POSIX format for variable names including brackets around
+the uppercase name as shown in this example: ``${API_TOKEN}``. pip will attempt
+to find the corresponding environment variable defined on the host system at
+runtime.
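+
+For example, assuming a private index that accepts an access token embedded in
+the URL (the host name and the ``API_TOKEN`` variable below are illustrative
+placeholders), a requirements file might contain::
+
+    --index-url https://${API_TOKEN}@my.package.repo/simple
+    SomePrivatePackage == 1.0
+
+with the token supplied at install time:
+
+.. tab:: Unix/macOS
+
+   .. code-block:: shell
+
+      export API_TOKEN=...
+      python -m pip install -r requirements.txt
+
+.. tab:: Windows
+
+   .. code-block:: shell
+
+      set API_TOKEN=...
+      py -m pip install -r requirements.txt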
+
+.. note::
+
+   There is no support for other variable expansion syntaxes such as
+   ``$VARIABLE`` and ``%VARIABLE%``.
+
+
+.. _`Example Requirements File`:
+
+Example Requirements File
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Use ``pip install -r example-requirements.txt`` to install::
+
+    #
+    ####### example-requirements.txt #######
+    #
+    ###### Requirements without Version Specifiers ######
+    nose
+    nose-cov
+    beautifulsoup4
+    #
+    ###### Requirements with Version Specifiers ######
+    #   See https://www.python.org/dev/peps/pep-0440/#version-specifiers
+    docopt == 0.6.1             # Version Matching. Must be version 0.6.1
+    keyring >= 4.1.1            # Minimum version 4.1.1
+    coverage != 3.5             # Version Exclusion. Anything except version 3.5
+    Mopidy-Dirble ~= 1.1        # Compatible release. Same as >= 1.1, == 1.*
+    #
+    ###### Refer to other requirements files ######
+    -r other-requirements.txt
+    #
+    #
+    ###### A particular file ######
+    ./downloads/numpy-1.9.2-cp34-none-win32.whl
+    http://wxpython.org/Phoenix/snapshot-builds/wxPython_Phoenix-3.0.3.dev1820+49a8884-cp34-none-win_amd64.whl
+    #
+    ###### Additional Requirements without Version Specifiers ######
+    #   Same as 1st section, just here to show that you can put things in any order.
+    rejected
+    green
+    #
+
+.. _`Requirement Specifiers`:
+
+Requirement Specifiers
+----------------------
+
+pip supports installing from a package index using a :term:`requirement
+specifier `. Generally speaking, a requirement
+specifier is composed of a project name followed by optional :term:`version
+specifiers `.  :pep:`508` contains a full specification
+of the format of a requirement. Since version 18.1 pip supports the
+``url_req``-form specification.
+
+Some examples:
+
+ ::
+
+  SomeProject
+  SomeProject == 1.3
+  SomeProject >=1.2,<2.0
+  SomeProject[foo, bar]
+  SomeProject~=1.4.2
+
+Since version 6.0, pip also supports specifiers containing `environment markers
+`__ like so:
+
+ ::
+
+  SomeProject ==5.4 ; python_version < '3.8'
+  SomeProject; sys_platform == 'win32'
+
+Since version 19.1, pip also supports `direct references
+`__ like so:
+
+ ::
+
+  SomeProject @ file:///somewhere/...
+
+Environment markers are supported in the command line and in requirements files.
+
+.. note::
+
+   Use quotes around specifiers in the shell when using ``>``, ``<``, or when
+   using environment markers. Don't use quotes in requirement files. [1]_
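+
+   One possible invocation, with ``SomePackage`` as a placeholder project name:
+
+   .. tab:: Unix/macOS
+
+      .. code-block:: console
+
+         $ python -m pip install "SomePackage >= 1.0 ; python_version < '3.8'"
+
+   .. tab:: Windows
+
+      .. code-block:: console
+
+         C:\> py -m pip install "SomePackage >= 1.0 ; python_version < '3.8'"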
+
+
+.. _`Per-requirement Overrides`:
+
+Per-requirement Overrides
+-------------------------
+
+Since version 7.0 pip supports controlling the command line options given to
+``setup.py`` via requirements files. This disables the use of wheels (cached or
+otherwise) for that package, as ``setup.py`` does not exist for wheels.
+
+The ``--global-option`` and ``--install-option`` options are used to pass
+options to ``setup.py``. For example:
+
+ ::
+
+    FooProject >= 1.2 --global-option="--no-user-cfg" \
+                      --install-option="--prefix='/usr/local'" \
+                      --install-option="--no-compile"
+
+The above translates roughly into running FooProject's ``setup.py``
+script as:
+
+ ::
+
+   python setup.py --no-user-cfg install --prefix='/usr/local' --no-compile
+
+Note that the only way of giving more than one option to ``setup.py``
+is through multiple ``--global-option`` and ``--install-option``
+options, as shown in the example above. The value of each option is
+passed as a single argument to the ``setup.py`` script. Therefore, a
+line such as the following is invalid and would result in an
+installation error.
+
+::
+
+   # Invalid. Please use '--install-option' twice as shown above.
+   FooProject >= 1.2 --install-option="--prefix=/usr/local --no-compile"
+
+
+.. _`Pre Release Versions`:
+
+Pre-release Versions
+--------------------
+
+Starting with v1.4, pip will only install stable versions as specified by
+`pre-releases`_ by default. If a version cannot be parsed as a compliant :pep:`440`
+version then it is assumed to be a pre-release.
+
+If a Requirement specifier includes a pre-release or development version
+(e.g. ``>=0.0.dev0``) then pip will allow pre-release and development versions
+for that requirement. This does not include the != flag.
+
+The ``pip install`` command also supports a :ref:`--pre ` flag
+that enables installation of pre-releases and development releases.
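+
+For instance, either of the following would allow pre-releases of a
+hypothetical ``SomePackage`` to be considered:
+
+.. tab:: Unix/macOS
+
+   .. code-block:: shell
+
+      python -m pip install --pre SomePackage
+      python -m pip install "SomePackage >= 1.0.dev0"
+
+.. tab:: Windows
+
+   .. code-block:: shell
+
+      py -m pip install --pre SomePackage
+      py -m pip install "SomePackage >= 1.0.dev0"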
+
+
+.. _pre-releases: https://www.python.org/dev/peps/pep-0440/#handling-of-pre-releases
+
+
+.. _`VCS Support`:
+
+VCS Support
+-----------
+
+pip supports installing from Git, Mercurial, Subversion and Bazaar, and detects
+the type of VCS using URL prefixes: ``git+``, ``hg+``, ``svn+``, and ``bzr+``.
+
+pip requires a working VCS command on your path: ``git``, ``hg``, ``svn``, or
+``bzr``.
+
+VCS projects can be installed in :ref:`editable mode ` (using
+the :ref:`--editable ` option) or not.
+
+* For editable installs, the clone location by default is
+  ``<venv path>/src/SomeProject`` in virtual environments, and
+  ``<cwd>/src/SomeProject``
+  for global installs.  The :ref:`--src ` option can be used to
+  modify this location.
+* For non-editable installs, the project is built locally in a temp dir and then
+  installed normally. Note that if a satisfactory version of the package is
+  already installed, the VCS source will not overwrite it without an
+  ``--upgrade`` flag. VCS requirements pin the package version (specified
+  in the ``setup.py`` file) of the target commit, not necessarily the commit
+  itself.
+* The :ref:`pip freeze` subcommand will record the VCS requirement specifier
+  (referencing a specific commit) if and only if the install is done using the
+  editable option.
+
+The "project name" component of the URL suffix ``egg=``
+is used by pip in its dependency logic to identify the project prior
+to pip downloading and analyzing the metadata. For projects
+where ``setup.py`` is not in the root of project, the "subdirectory" component
+is used. The value of the "subdirectory" component should be a path starting
+from the root of the project to where ``setup.py`` is located.
+
+If your repository layout is::
+
+   pkg_dir
+   ├── setup.py  # setup.py for package "pkg"
+   └── some_module.py
+   other_dir
+   └── some_file
+   some_other_file
+
+Then, to install from this repository, the syntax would be:
+
+.. tab:: Unix/macOS
+
+   .. code-block:: shell
+
+      python -m pip install -e "vcs+protocol://repo_url/#egg=pkg&subdirectory=pkg_dir"
+
+.. tab:: Windows
+
+   .. code-block:: shell
+
+      py -m pip install -e "vcs+protocol://repo_url/#egg=pkg&subdirectory=pkg_dir"
+
+
+Git
+^^^
+
+pip currently supports cloning over ``git``, ``git+http``, ``git+https``,
+``git+ssh``, ``git+git`` and ``git+file``.
+
+.. warning::
+
+    Note that the use of ``git``, ``git+git``, and ``git+http`` is discouraged.
+    The former two use `the Git Protocol`_, which lacks authentication, and HTTP is
+    insecure due to lack of TLS based encryption.
+
+Here are the supported forms::
+
+    [-e] git+http://git.example.com/MyProject#egg=MyProject
+    [-e] git+https://git.example.com/MyProject#egg=MyProject
+    [-e] git+ssh://git.example.com/MyProject#egg=MyProject
+    [-e] git+file:///home/user/projects/MyProject#egg=MyProject
+
+Passing a branch name, a commit hash, a tag name or a git ref is possible like so::
+
+    [-e] git+https://git.example.com/MyProject.git@main#egg=MyProject
+    [-e] git+https://git.example.com/MyProject.git@v1.0#egg=MyProject
+    [-e] git+https://git.example.com/MyProject.git@da39a3ee5e6b4b0d3255bfef95601890afd80709#egg=MyProject
+    [-e] git+https://git.example.com/MyProject.git@refs/pull/123/head#egg=MyProject
+
+When passing a commit hash, specifying a full hash is preferable to a partial
+hash because a full hash allows pip to operate more efficiently (e.g. by
+making fewer network calls).
+
+.. _`the Git Protocol`: https://git-scm.com/book/en/v2/Git-on-the-Server-The-Protocols
+
+Mercurial
+^^^^^^^^^
+
+The supported schemes are: ``hg+file``, ``hg+http``, ``hg+https``,
+``hg+static-http``, and ``hg+ssh``.
+
+Here are the supported forms::
+
+    [-e] hg+http://hg.myproject.org/MyProject#egg=MyProject
+    [-e] hg+https://hg.myproject.org/MyProject#egg=MyProject
+    [-e] hg+ssh://hg.myproject.org/MyProject#egg=MyProject
+    [-e] hg+file:///home/user/projects/MyProject#egg=MyProject
+
+You can also specify a revision number, a revision hash, a tag name or a local
+branch name like so::
+
+    [-e] hg+http://hg.example.com/MyProject@da39a3ee5e6b#egg=MyProject
+    [-e] hg+http://hg.example.com/MyProject@2019#egg=MyProject
+    [-e] hg+http://hg.example.com/MyProject@v1.0#egg=MyProject
+    [-e] hg+http://hg.example.com/MyProject@special_feature#egg=MyProject
+
+Subversion
+^^^^^^^^^^
+
+pip supports the URL schemes ``svn``, ``svn+svn``, ``svn+http``, ``svn+https``, ``svn+ssh``.
+
+Here are some of the supported forms::
+
+    [-e] svn+https://svn.example.com/MyProject#egg=MyProject
+    [-e] svn+ssh://svn.example.com/MyProject#egg=MyProject
+    [-e] svn+ssh://user@svn.example.com/MyProject#egg=MyProject
+
+You can also give specific revisions to an SVN URL, like so::
+
+    [-e] svn+svn://svn.example.com/svn/MyProject#egg=MyProject
+    [-e] svn+http://svn.example.com/svn/MyProject/trunk@2019#egg=MyProject
+
+which will check out revision 2019.  ``@{20080101}`` would also check
+out the revision from 2008-01-01. You can only check out specific
+revisions using ``-e svn+...``.
+
+Bazaar
+^^^^^^
+
+pip supports Bazaar using the ``bzr+http``, ``bzr+https``, ``bzr+ssh``,
+``bzr+sftp``, ``bzr+ftp`` and ``bzr+lp`` schemes.
+
+Here are the supported forms::
+
+    [-e] bzr+http://bzr.example.com/MyProject/trunk#egg=MyProject
+    [-e] bzr+sftp://user@example.com/MyProject/trunk#egg=MyProject
+    [-e] bzr+ssh://user@example.com/MyProject/trunk#egg=MyProject
+    [-e] bzr+ftp://user@example.com/MyProject/trunk#egg=MyProject
+    [-e] bzr+lp:MyProject#egg=MyProject
+
+Tags or revisions can be installed like so::
+
+    [-e] bzr+https://bzr.example.com/MyProject/trunk@2019#egg=MyProject
+    [-e] bzr+http://bzr.example.com/MyProject/trunk@v1.0#egg=MyProject
+
+Using Environment Variables
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Since version 10, pip also supports environment variables in these URLs, which
+makes it possible to reference private repositories without having to store
+access tokens in the requirements file. For example, a private git repository
+allowing Basic Auth for authentication can be referenced like this::
+
+    [-e] git+http://${AUTH_USER}:${AUTH_PASSWORD}@git.example.com/MyProject#egg=MyProject
+    [-e] git+https://${AUTH_USER}:${AUTH_PASSWORD}@git.example.com/MyProject#egg=MyProject
+
+.. note::
+
+   Only ``${VARIABLE}`` is supported, other formats like ``$VARIABLE`` or
+   ``%VARIABLE%`` won't work.
+
+Finding Packages
+----------------
+
+pip searches for packages on `PyPI`_ using the
+`HTTP simple interface `_,
+which is documented `here `_
+and `there `_.
+
+pip offers a number of package index options for modifying how packages are
+found.
+
+pip looks for packages in a number of places: on PyPI (if not disabled via
+``--no-index``), in the local filesystem, and in any additional repositories
+specified via ``--find-links`` or ``--index-url``. There is no ordering in
+the locations that are searched. Rather they are all checked, and the "best"
+match for the requirements (in terms of version number - see :pep:`440` for
+details) is selected.
+
+See the :ref:`pip install Examples`.
+
+
+.. _`SSL Certificate Verification`:
+
+SSL Certificate Verification
+----------------------------
+
+Starting with v1.3, pip provides SSL certificate verification over HTTPS, to
+prevent man-in-the-middle attacks against PyPI downloads. This does not use
+the system certificate store but instead uses a bundled CA certificate
+store. The default bundled CA certificate store may be overridden by using the
+``--cert`` option or by using the ``PIP_CERT``, ``REQUESTS_CA_BUNDLE``, or
+``CURL_CA_BUNDLE`` environment variables.
+
+
+.. _`Caching`:
+
+Caching
+-------
+
+Starting with v6.0, pip provides an on-by-default cache which functions
+similarly to that of a web browser. While the cache is on by default and is
+designed to do the right thing by default, you can disable the cache and always
+access PyPI by utilizing the ``--no-cache-dir`` option.
+
+When making any HTTP request pip will first check its local cache to determine
+if it has a suitable response stored for that request which has not expired. If
+it does then it simply returns that response and doesn't make the request.
+
+If it has a response stored, but it has expired, then it will attempt to make a
+conditional request to refresh the cache which will either return an empty
+response telling pip to simply use the cached item (and refresh the expiration
+timer) or it will return a whole new response which pip can then store in the
+cache.
+
+While this cache attempts to minimize network activity, it does not prevent
+network access altogether. If you want a local install solution that
+circumvents accessing PyPI, see :ref:`Installing from local packages`.
+
+The default location for the cache directory depends on the operating system:
+
+Unix
+  :file:`~/.cache/pip` and it respects the ``XDG_CACHE_HOME`` directory.
+macOS
+  :file:`~/Library/Caches/pip`.
+Windows
+  :file:`<CSIDL_LOCAL_APPDATA>\\pip\\Cache`
+
+Run ``pip cache dir`` to show the cache directory and see :ref:`pip cache` to
+inspect and manage pip’s cache.
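+
+For example (the exact location will vary between systems):
+
+.. tab:: Unix/macOS
+
+   .. code-block:: console
+
+      $ python -m pip cache dir
+      /home/user/.cache/pip
+
+.. tab:: Windows
+
+   .. code-block:: console
+
+      C:\> py -m pip cache dir
+      C:\Users\user\AppData\Local\pip\Cache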
+
+
+.. _`Wheel cache`:
+
+Wheel Cache
+^^^^^^^^^^^
+
+pip will read from the subdirectory ``wheels`` within the pip cache directory
+and use any packages found there. This is disabled via the same
+``--no-cache-dir`` option that disables the HTTP cache. The internal structure
+of that is not part of the pip API. As of 7.0, pip makes a subdirectory for
+each sdist that wheels are built from and places the resulting wheels inside.
+
+As of version 20.0, pip also caches wheels when building from an immutable Git
+reference (i.e. a commit hash).
+
+pip prefers wheels that are already in the cache over building new ones. Note
+that this means that if a package has optional C extensions and falls back to
+building a ``py``-tagged wheel when the C extension cannot be built, pip will
+not attempt to build a better wheel for Pythons that would have supported it,
+once any generic wheel is built. To correct this, make sure that the wheels
+are built with Python specific tags - e.g. pp on PyPy.
+
+When no wheels are found for an sdist, pip will attempt to build a wheel
+automatically and insert it into the wheel cache.
+
+
+.. _`hash-checking mode`:
+
+Hash-Checking Mode
+------------------
+
+Since version 8.0, pip can check downloaded package archives against local
+hashes to protect against remote tampering. To verify a package against one or
+more hashes, add them to the end of the line::
+
+    FooProject == 1.2 --hash=sha256:2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824 \
+                      --hash=sha256:486ea46224d1bb4fb680f34f7c9ad96a8f24ec88be73ea8e5a6c65260e9cb8a7
+
+(The ability to use multiple hashes is important when a package has both
+binary and source distributions or when it offers binary distributions for a
+variety of platforms.)
+
+The recommended hash algorithm at the moment is sha256, but stronger ones are
+allowed, including all those supported by ``hashlib``. However, weaker ones
+such as md5, sha1, and sha224 are excluded to avoid giving a false sense of
+security.
+
+Hash verification is an all-or-nothing proposition. Specifying a ``--hash``
+against any requirement not only checks that hash but also activates a global
+*hash-checking mode*, which imposes several other security restrictions:
+
+* Hashes are required for all requirements. This is because a partially-hashed
+  requirements file is of little use and thus likely an error: a malicious
+  actor could slip bad code into the installation via one of the unhashed
+  requirements. Note that hashes embedded in URL-style requirements via the
+  ``#md5=...`` syntax suffice to satisfy this rule (regardless of hash
+  strength, for legacy reasons), though you should use a stronger
+  hash like sha256 whenever possible.
+* Hashes are required for all dependencies. An error results if there is a
+  dependency that is not spelled out and hashed in the requirements file.
+* Requirements that take the form of project names (rather than URLs or local
+  filesystem paths) must be pinned to a specific version using ``==``. This
+  prevents a surprising hash mismatch upon the release of a new version
+  that matches the requirement specifier.
+* ``--egg`` is disallowed, because it delegates installation of dependencies
+  to setuptools, giving up pip's ability to enforce any of the above.
+
+.. _`--require-hashes`:
+
+Hash-checking mode can be forced on with the ``--require-hashes`` command-line
+option:
+
+.. tab:: Unix/macOS
+
+   .. code-block:: console
+
+      $ python -m pip install --require-hashes -r requirements.txt
+      ...
+      Hashes are required in --require-hashes mode (implicitly on when a hash is
+      specified for any package). These requirements were missing hashes,
+      leaving them open to tampering. These are the hashes the downloaded
+      archives actually had. You can add lines like these to your requirements
+      files to prevent tampering.
+         pyelasticsearch==1.0 --hash=sha256:44ddfb1225054d7d6b1d02e9338e7d4809be94edbe9929a2ec0807d38df993fa
+         more-itertools==2.2 --hash=sha256:93e62e05c7ad3da1a233def6731e8285156701e3419a5fe279017c429ec67ce0
+
+.. tab:: Windows
+
+   .. code-block:: console
+
+      C:\> py -m pip install --require-hashes -r requirements.txt
+      ...
+      Hashes are required in --require-hashes mode (implicitly on when a hash is
+      specified for any package). These requirements were missing hashes,
+      leaving them open to tampering. These are the hashes the downloaded
+      archives actually had. You can add lines like these to your requirements
+      files to prevent tampering.
+         pyelasticsearch==1.0 --hash=sha256:44ddfb1225054d7d6b1d02e9338e7d4809be94edbe9929a2ec0807d38df993fa
+         more-itertools==2.2 --hash=sha256:93e62e05c7ad3da1a233def6731e8285156701e3419a5fe279017c429ec67ce0
+
+
+This can be useful in deploy scripts, to ensure that the author of the
+requirements file provided hashes. It is also a convenient way to bootstrap
+your list of hashes, since it shows the hashes of the packages it fetched. It
+fetches only the preferred archive for each package, so you may still need to
+add hashes for alternative archives using :ref:`pip hash`: for instance if
+there is both a binary and a source distribution.
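+
+For example, to compute a hash for the source distribution as well (with
+``SomePackage`` and the archive name as placeholders), one could fetch it
+explicitly and run :ref:`pip hash` on the result:
+
+.. tab:: Unix/macOS
+
+   .. code-block:: console
+
+      $ python -m pip download --no-binary :all: SomePackage
+      $ python -m pip hash SomePackage-2.2.tar.gz
+
+.. tab:: Windows
+
+   .. code-block:: console
+
+      C:\> py -m pip download --no-binary :all: SomePackage
+      C:\> py -m pip hash SomePackage-2.2.tar.gz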
+
+The :ref:`wheel cache ` is disabled in hash-checking mode to
+prevent spurious hash mismatch errors. These would otherwise occur while
+installing sdists that had already been automatically built into cached wheels:
+those wheels would be selected for installation, but their hashes would not
+match the sdist ones from the requirements file. A further complication is that
+locally built wheels are nondeterministic: contemporary modification times make
+their way into the archive, making hashes unpredictable across machines and
+cache flushes. Compilation of C code adds further nondeterminism, as many
+compilers include random-seeded values in their output. However, wheels fetched
+from index servers are the same every time. They land in pip's HTTP cache, not
+its wheel cache, and are used normally in hash-checking mode. The only downside
+of having the wheel cache disabled is thus extra build time for sdists, and
+this can be solved by making sure pre-built wheels are available from the index
+server.
+
+Hash-checking mode also works with :ref:`pip download` and :ref:`pip wheel`. A
+:ref:`comparison of hash-checking mode with other repeatability strategies
+` is available in the User Guide.
+
+.. warning::
+
+   Beware of the ``setup_requires`` keyword arg in :file:`setup.py`. The
+   (rare) packages that use it will cause those dependencies to be downloaded
+   by setuptools directly, skipping pip's hash-checking. If you need to use
+   such a package, see :ref:`Controlling
+   setup_requires`.
+
+.. warning::
+
+   Be careful not to nullify all your security work when you install your
+   actual project by using setuptools directly: for example, by calling
+   ``python setup.py install``, ``python setup.py develop``, or
+   ``easy_install``. Setuptools will happily go out and download, unchecked,
+   anything you missed in your requirements file—and it’s easy to miss things
+   as your project evolves. To be safe, install your project using pip and
+   :ref:`--no-deps `.
+
+   Instead of ``python setup.py develop``, use...
+
+   .. tab:: Unix/macOS
+
+      .. code-block:: shell
+
+         python -m pip install --no-deps -e .
+
+   .. tab:: Windows
+
+      .. code-block:: shell
+
+         py -m pip install --no-deps -e .
+
+
+   Instead of ``python setup.py install``, use...
+
+   .. tab:: Unix/macOS
+
+      .. code-block:: shell
+
+         python -m pip install --no-deps .
+
+   .. tab:: Windows
+
+      .. code-block:: shell
+
+         py -m pip install --no-deps .
+
+Hashes from PyPI
+^^^^^^^^^^^^^^^^
+
+PyPI provides an MD5 hash in the fragment portion of each package download URL,
+like ``#md5=123...``, which pip checks as a protection against download
+corruption. Other hash algorithms that have guaranteed support from ``hashlib``
+are also supported here: sha1, sha224, sha384, sha256, and sha512. Since this
+hash originates remotely, it is not a useful guard against tampering and thus
+does not satisfy the ``--require-hashes`` demand that every package have a
+local hash.
+
+
+Local project installs
+----------------------
+
+pip supports installing local projects in both regular and editable mode.
+You can install local projects by specifying the project path to pip:
+
+.. tab:: Unix/macOS
+
+   .. code-block:: shell
+
+      python -m pip install path/to/SomeProject
+
+.. tab:: Windows
+
+   .. code-block:: shell
+
+      py -m pip install path/to/SomeProject
+
+During regular installation, pip will copy the entire project directory to a
+temporary location and install from there. The exception is that pip will
+exclude .tox and .nox directories present in the top level of the project from
+being copied. This approach is the cause of several performance and correctness
+issues, so it is planned that pip 21.3 will change to install directly from the
+local project directory. Depending on the build backend used by the project,
+this may generate secondary build artifacts in the project directory, such as
+the ``.egg-info`` and ``build`` directories in the case of the setuptools
+backend.
+
+To opt in to the future behavior, specify the ``--use-feature=in-tree-build``
+option in pip's command line.
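+
+For example (the project path is illustrative):
+
+.. tab:: Unix/macOS
+
+   .. code-block:: shell
+
+      python -m pip install --use-feature=in-tree-build path/to/SomeProject
+
+.. tab:: Windows
+
+   .. code-block:: shell
+
+      py -m pip install --use-feature=in-tree-build path/to/SomeProject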
+
+
+.. _`editable-installs`:
+
+"Editable" Installs
+^^^^^^^^^^^^^^^^^^^
+
+"Editable" installs are fundamentally `"setuptools develop mode"
+`_
+installs.
+
+You can install local projects or VCS projects in "editable" mode:
+
+.. tab:: Unix/macOS
+
+   .. code-block:: shell
+
+      python -m pip install -e path/to/SomeProject
+      python -m pip install -e git+http://repo/my_project.git#egg=SomeProject
+
+.. tab:: Windows
+
+   .. code-block:: shell
+
+      py -m pip install -e path/to/SomeProject
+      py -m pip install -e git+http://repo/my_project.git#egg=SomeProject
+
+
+(See the :ref:`VCS Support` section above for more information on VCS-related syntax.)
+
+For local projects, the "SomeProject.egg-info" directory is created relative to
+the project path.  This is one advantage over just using ``setup.py develop``,
+which creates the "egg-info" directly relative to the current working directory.
+
+
+.. _`controlling-setup-requires`:
+
+Controlling setup_requires
+--------------------------
+
+Setuptools offers the ``setup_requires`` `setup() keyword
+`_
+for specifying dependencies that need to be present in order for the
+``setup.py`` script to run.  Internally, Setuptools uses ``easy_install``
+to fulfill these dependencies.
+
+pip has no way to control how these dependencies are located.  None of the
+package index options have an effect.
+
+The solution is to configure a "system" or "personal" `Distutils configuration
+file
+`_ to
+manage the fulfillment.
+
+For example, to have the dependency located at an alternate index, add this:
+
+::
+
+  [easy_install]
+  index_url = https://my.index-mirror.com
+
+To have the dependency located from a local directory and not crawl PyPI, add this:
+
+::
+
+  [easy_install]
+  allow_hosts = ''
+  find_links = file:///path/to/local/archives/
+
+
+Build System Interface
+----------------------
+
+In order for pip to install a package from source, ``setup.py`` must implement
+the following commands::
+
+    setup.py egg_info [--egg-base XXX]
+    setup.py install --record XXX [--single-version-externally-managed] [--root XXX] [--compile|--no-compile] [--install-headers XXX]
+
+The ``egg_info`` command should create egg metadata for the package, as
+described in the setuptools documentation at
+https://setuptools.readthedocs.io/en/latest/setuptools.html#egg-info-create-egg-metadata-and-set-build-tags
+
+The ``install`` command should implement the complete process of installing the
+package to the target directory XXX.
+
+To install a package in "editable" mode (``pip install -e``), ``setup.py`` must
+implement the following command::
+
+    setup.py develop --no-deps
+
+This should implement the complete process of installing the package in
+"editable" mode.
+
+pip will attempt to build all packages into wheels::
+
+    setup.py bdist_wheel -d XXX
+
+One further ``setup.py`` command is invoked by ``pip install``::
+
+    setup.py clean
+
+This command is invoked to clean up temporary files from the build. (TODO:
+Investigate in more detail when this command is required).
+
+No other build system commands are invoked by the ``pip install`` command.
+
+Installing a package from a wheel does not invoke the build system at all.
+
+.. _PyPI: https://pypi.org/
+.. _setuptools extras: https://setuptools.readthedocs.io/en/latest/userguide/dependency_management.html#optional-dependencies
+
+
+
+.. _`pip install Options`:
+
+
+Options
+=======
+
+.. pip-command-options:: install
+
+.. pip-index-options:: install
+
+
+.. _`pip install Examples`:
+
+
+Examples
+========
+
+#. Install ``SomePackage`` and its dependencies from `PyPI`_ using :ref:`Requirement Specifiers`
+
+   .. tab:: Unix/macOS
+
+      .. code-block:: shell
+
+         python -m pip install SomePackage            # latest version
+         python -m pip install SomePackage==1.0.4     # specific version
+         python -m pip install 'SomePackage>=1.0.4'   # minimum version
+
+   .. tab:: Windows
+
+      .. code-block:: shell
+
+         py -m pip install SomePackage            # latest version
+         py -m pip install SomePackage==1.0.4     # specific version
+         py -m pip install 'SomePackage>=1.0.4'   # minimum version
+
+
+#. Install a list of requirements specified in a file.  See the :ref:`Requirements files `.
+
+   .. tab:: Unix/macOS
+
+      .. code-block:: shell
+
+         python -m pip install -r requirements.txt
+
+   .. tab:: Windows
+
+      .. code-block:: shell
+
+         py -m pip install -r requirements.txt
+
+
+#. Upgrade an already installed ``SomePackage`` to the latest from PyPI.
+
+   .. tab:: Unix/macOS
+
+      .. code-block:: shell
+
+         python -m pip install --upgrade SomePackage
+
+   .. tab:: Windows
+
+      .. code-block:: shell
+
+         py -m pip install --upgrade SomePackage
+
+   .. note::
+
+      This will guarantee an update to ``SomePackage`` as it is a direct
+      requirement, and possibly upgrade dependencies if their installed
+      versions do not meet the minimum requirements of ``SomePackage``.
+      Any non-requisite updates of its dependencies (indirect requirements)
+      will be affected by the ``--upgrade-strategy`` option.
+
+#. Install a local project in "editable" mode. See the section on :ref:`Editable Installs `.
+
+   .. tab:: Unix/macOS
+
+      .. code-block:: shell
+
+         python -m pip install -e .                # project in current directory
+         python -m pip install -e path/to/project  # project in another directory
+
+   .. tab:: Windows
+
+      .. code-block:: shell
+
+         py -m pip install -e .                 # project in current directory
+         py -m pip install -e path/to/project   # project in another directory
+
+
+#. Install a project from VCS
+
+   .. tab:: Unix/macOS
+
+      .. code-block:: shell
+
+         python -m pip install SomeProject@git+https://git.repo/some_pkg.git@1.3.1
+
+   .. tab:: Windows
+
+      .. code-block:: shell
+
+         py -m pip install SomeProject@git+https://git.repo/some_pkg.git@1.3.1
+
+
+#. Install a project from VCS in "editable" mode. See the sections on :ref:`VCS Support ` and :ref:`Editable Installs `.
+
+   .. tab:: Unix/macOS
+
+      .. code-block:: shell
+
+         python -m pip install -e git+https://git.repo/some_pkg.git#egg=SomePackage          # from git
+         python -m pip install -e hg+https://hg.repo/some_pkg.git#egg=SomePackage            # from mercurial
+         python -m pip install -e svn+svn://svn.repo/some_pkg/trunk/#egg=SomePackage         # from svn
+         python -m pip install -e git+https://git.repo/some_pkg.git@feature#egg=SomePackage  # from 'feature' branch
+         python -m pip install -e "git+https://git.repo/some_repo.git#egg=subdir&subdirectory=subdir_path" # install a python package from a repo subdirectory
+
+   .. tab:: Windows
+
+      .. code-block:: shell
+
+         py -m pip install -e git+https://git.repo/some_pkg.git#egg=SomePackage          # from git
+         py -m pip install -e hg+https://hg.repo/some_pkg.git#egg=SomePackage            # from mercurial
+         py -m pip install -e svn+svn://svn.repo/some_pkg/trunk/#egg=SomePackage         # from svn
+         py -m pip install -e git+https://git.repo/some_pkg.git@feature#egg=SomePackage  # from 'feature' branch
+         py -m pip install -e "git+https://git.repo/some_repo.git#egg=subdir&subdirectory=subdir_path" # install a python package from a repo subdirectory
+
+#. Install a package with `setuptools extras`_.
+
+   .. tab:: Unix/macOS
+
+      .. code-block:: shell
+
+         python -m pip install SomePackage[PDF]
+         python -m pip install "SomePackage[PDF] @ git+https://git.repo/SomePackage@main#subdirectory=subdir_path"
+         python -m pip install .[PDF]  # project in current directory
+         python -m pip install SomePackage[PDF]==3.0
+         python -m pip install SomePackage[PDF,EPUB]  # multiple extras
+
+   .. tab:: Windows
+
+      .. code-block:: shell
+
+         py -m pip install SomePackage[PDF]
+         py -m pip install "SomePackage[PDF] @ git+https://git.repo/SomePackage@main#subdirectory=subdir_path"
+         py -m pip install .[PDF]  # project in current directory
+         py -m pip install SomePackage[PDF]==3.0
+         py -m pip install SomePackage[PDF,EPUB]  # multiple extras
+
+#. Install a particular source archive file.
+
+   .. tab:: Unix/macOS
+
+      .. code-block:: shell
+
+         python -m pip install ./downloads/SomePackage-1.0.4.tar.gz
+         python -m pip install http://my.package.repo/SomePackage-1.0.4.zip
+
+   .. tab:: Windows
+
+      .. code-block:: shell
+
+         py -m pip install ./downloads/SomePackage-1.0.4.tar.gz
+         py -m pip install http://my.package.repo/SomePackage-1.0.4.zip
+
+#. Install a particular source archive file following :pep:`440` direct references.
+
+   .. tab:: Unix/macOS
+
+      .. code-block:: shell
+
+         python -m pip install SomeProject@http://my.package.repo/SomeProject-1.2.3-py33-none-any.whl
+         python -m pip install "SomeProject @ http://my.package.repo/SomeProject-1.2.3-py33-none-any.whl"
+         python -m pip install SomeProject@http://my.package.repo/1.2.3.tar.gz
+
+   .. tab:: Windows
+
+      .. code-block:: shell
+
+         py -m pip install SomeProject@http://my.package.repo/SomeProject-1.2.3-py33-none-any.whl
+         py -m pip install "SomeProject @ http://my.package.repo/SomeProject-1.2.3-py33-none-any.whl"
+         py -m pip install SomeProject@http://my.package.repo/1.2.3.tar.gz
+
+#. Install from alternative package repositories.
+
+   Install from a different index, and not `PyPI`_
+
+   .. tab:: Unix/macOS
+
+      .. code-block:: shell
+
+         python -m pip install --index-url http://my.package.repo/simple/ SomePackage
+
+   .. tab:: Windows
+
+      .. code-block:: shell
+
+         py -m pip install --index-url http://my.package.repo/simple/ SomePackage
+
+   Install from a local flat directory containing archives (and don't scan indexes):
+
+   .. tab:: Unix/macOS
+
+      .. code-block:: shell
+
+         python -m pip install --no-index --find-links=file:///local/dir/ SomePackage
+         python -m pip install --no-index --find-links=/local/dir/ SomePackage
+         python -m pip install --no-index --find-links=relative/dir/ SomePackage
+
+   .. tab:: Windows
+
+      .. code-block:: shell
+
+         py -m pip install --no-index --find-links=file:///local/dir/ SomePackage
+         py -m pip install --no-index --find-links=/local/dir/ SomePackage
+         py -m pip install --no-index --find-links=relative/dir/ SomePackage
+
+   Search an additional index during install, in addition to `PyPI`_
+
+   .. warning::
+
+       Using this option to search for packages which are not in the main
+       repository (such as private packages) is unsafe, per a security
+       vulnerability called
+       `dependency confusion `_:
+       an attacker can claim the package on the public repository in a way that
+       will ensure it gets chosen over the private package.
+
+   .. tab:: Unix/macOS
+
+      .. code-block:: shell
+
+         python -m pip install --extra-index-url http://my.package.repo/simple SomePackage
+
+   .. tab:: Windows
+
+      .. code-block:: shell
+
+         py -m pip install --extra-index-url http://my.package.repo/simple SomePackage
+
+
+#. Find pre-release and development versions, in addition to stable versions.  By default, pip only finds stable versions.
+
+   .. tab:: Unix/macOS
+
+      .. code-block:: shell
+
+         python -m pip install --pre SomePackage
+
+   .. tab:: Windows
+
+      .. code-block:: shell
+
+         py -m pip install --pre SomePackage
+
+
+#. Install packages from source.
+
+   Do not use any binary packages
+
+   .. tab:: Unix/macOS
+
+      .. code-block:: shell
+
+         python -m pip install SomePackage1 SomePackage2 --no-binary :all:
+
+   .. tab:: Windows
+
+      .. code-block:: shell
+
+         py -m pip install SomePackage1 SomePackage2 --no-binary :all:
+
+   Specify ``SomePackage1`` to be installed from source:
+
+   .. tab:: Unix/macOS
+
+      .. code-block:: shell
+
+         python -m pip install SomePackage1 SomePackage2 --no-binary SomePackage1
+
+   .. tab:: Windows
+
+      .. code-block:: shell
+
+         py -m pip install SomePackage1 SomePackage2 --no-binary SomePackage1
+
+----
+
+.. [1] This is true with the exception that pip v7.0 and v7.0.1 required quotes
+       around specifiers containing environment markers in requirement files.
diff --git a/docs/html/cli/pip_list.rst b/docs/html/cli/pip_list.rst
new file mode 100644
index 000000000..5119a804c
--- /dev/null
+++ b/docs/html/cli/pip_list.rst
@@ -0,0 +1,201 @@
+.. _`pip list`:
+
+========
+pip list
+========
+
+
+
+Usage
+=====
+
+.. tab:: Unix/macOS
+
+   .. pip-command-usage:: list "python -m pip"
+
+.. tab:: Windows
+
+   .. pip-command-usage:: list "py -m pip"
+
+
+Description
+===========
+
+.. pip-command-description:: list
+
+
+Options
+=======
+
+.. pip-command-options:: list
+
+.. pip-index-options:: list
+
+
+Examples
+========
+
+#. List installed packages.
+
+   .. tab:: Unix/macOS
+
+      .. code-block:: console
+
+         $ python -m pip list
+         docutils (0.10)
+         Jinja2 (2.7.2)
+         MarkupSafe (0.18)
+         Pygments (1.6)
+         Sphinx (1.2.1)
+
+   .. tab:: Windows
+
+      .. code-block:: console
+
+         C:\> py -m pip list
+         docutils (0.10)
+         Jinja2 (2.7.2)
+         MarkupSafe (0.18)
+         Pygments (1.6)
+         Sphinx (1.2.1)
+
+#. List outdated packages (excluding editables), and the latest version available.
+
+   .. tab:: Unix/macOS
+
+      .. code-block:: console
+
+         $ python -m pip list --outdated
+         docutils (Current: 0.10 Latest: 0.11)
+         Sphinx (Current: 1.2.1 Latest: 1.2.2)
+
+   .. tab:: Windows
+
+      .. code-block:: console
+
+         C:\> py -m pip list --outdated
+         docutils (Current: 0.10 Latest: 0.11)
+         Sphinx (Current: 1.2.1 Latest: 1.2.2)
+
+#. List installed packages with column formatting.
+
+   .. tab:: Unix/macOS
+
+      .. code-block:: console
+
+         $ python -m pip list --format columns
+         Package Version
+         ------- -------
+         docopt  0.6.2
+         idlex   1.13
+         jedi    0.9.0
+
+   .. tab:: Windows
+
+      .. code-block:: console
+
+         C:\> py -m pip list --format columns
+         Package Version
+         ------- -------
+         docopt  0.6.2
+         idlex   1.13
+         jedi    0.9.0
+
+#. List outdated packages with column formatting.
+
+   .. tab:: Unix/macOS
+
+      .. code-block:: console
+
+         $ python -m pip list -o --format columns
+         Package    Version Latest Type
+         ---------- ------- ------ -----
+         retry      0.8.1   0.9.1  wheel
+         setuptools 20.6.7  21.0.0 wheel
+
+   .. tab:: Windows
+
+      .. code-block:: console
+
+         C:\> py -m pip list -o --format columns
+         Package    Version Latest Type
+         ---------- ------- ------ -----
+         retry      0.8.1   0.9.1  wheel
+         setuptools 20.6.7  21.0.0 wheel
+
+#. List packages that are not dependencies of other packages. Can be combined with
+   other options.
+
+   .. tab:: Unix/macOS
+
+      .. code-block:: console
+
+         $ python -m pip list --outdated --not-required
+         docutils (Current: 0.10 Latest: 0.11)
+
+   .. tab:: Windows
+
+      .. code-block:: console
+
+         C:\> py -m pip list --outdated --not-required
+         docutils (Current: 0.10 Latest: 0.11)
+
+#. Use legacy formatting
+
+   .. tab:: Unix/macOS
+
+      .. code-block:: console
+
+         $ python -m pip list --format=legacy
+         colorama (0.3.7)
+         docopt (0.6.2)
+         idlex (1.13)
+         jedi (0.9.0)
+
+   .. tab:: Windows
+
+      .. code-block:: console
+
+         C:\> py -m pip list --format=legacy
+         colorama (0.3.7)
+         docopt (0.6.2)
+         idlex (1.13)
+         jedi (0.9.0)
+
+#. Use json formatting
+
+   .. tab:: Unix/macOS
+
+      .. code-block:: console
+
+         $ python -m pip list --format=json
+         [{"name": "colorama", "version": "0.3.7"}, {"name": "docopt", "version": "0.6.2"}, ...
+
+   .. tab:: Windows
+
+      .. code-block:: console
+
+         C:\> py -m pip list --format=json
+         [{"name": "colorama", "version": "0.3.7"}, {"name": "docopt", "version": "0.6.2"}, ...
+
+#. Use freeze formatting
+
+   .. tab:: Unix/macOS
+
+      .. code-block:: console
+
+         $ python -m pip list --format=freeze
+         colorama==0.3.7
+         docopt==0.6.2
+         idlex==1.13
+         jedi==0.9.0
+
+   .. tab:: Windows
+
+      .. code-block:: console
+
+         C:\> py -m pip list --format=freeze
+         colorama==0.3.7
+         docopt==0.6.2
+         idlex==1.13
+         jedi==0.9.0
diff --git a/docs/html/cli/pip_search.rst b/docs/html/cli/pip_search.rst
new file mode 100644
index 000000000..9905a1baf
--- /dev/null
+++ b/docs/html/cli/pip_search.rst
@@ -0,0 +1,52 @@
+.. _`pip search`:
+
+==========
+pip search
+==========
+
+
+
+Usage
+=====
+
+.. tab:: Unix/macOS
+
+   .. pip-command-usage:: search "python -m pip"
+
+.. tab:: Windows
+
+   .. pip-command-usage:: search "py -m pip"
+
+
+Description
+===========
+
+.. pip-command-description:: search
+
+
+Options
+=======
+
+.. pip-command-options:: search
+
+
+Examples
+========
+
+#. Search for "peppercorn"
+
+   .. tab:: Unix/macOS
+
+      .. code-block:: console
+
+         $ python -m pip search peppercorn
+         pepperedform    - Helpers for using peppercorn with formprocess.
+         peppercorn      - A library for converting a token stream into [...]
+
+   .. tab:: Windows
+
+      .. code-block:: console
+
+         C:\> py -m pip search peppercorn
+         pepperedform    - Helpers for using peppercorn with formprocess.
+         peppercorn      - A library for converting a token stream into [...]
diff --git a/docs/html/cli/pip_show.rst b/docs/html/cli/pip_show.rst
new file mode 100644
index 000000000..b603f786f
--- /dev/null
+++ b/docs/html/cli/pip_show.rst
@@ -0,0 +1,154 @@
+.. _`pip show`:
+
+========
+pip show
+========
+
+
+
+Usage
+=====
+
+.. tab:: Unix/macOS
+
+   .. pip-command-usage:: show "python -m pip"
+
+.. tab:: Windows
+
+   .. pip-command-usage:: show "py -m pip"
+
+
+Description
+===========
+
+.. pip-command-description:: show
+
+
+Options
+=======
+
+.. pip-command-options:: show
+
+
+Examples
+========
+
+#. Show information about a package:
+
+   .. tab:: Unix/macOS
+
+      .. code-block:: console
+
+         $ python -m pip show sphinx
+         Name: Sphinx
+         Version: 1.4.5
+         Summary: Python documentation generator
+         Home-page: http://sphinx-doc.org/
+         Author: Georg Brandl
+         Author-email: georg@python.org
+         License: BSD
+         Location: /my/env/lib/python2.7/site-packages
+         Requires: docutils, snowballstemmer, alabaster, Pygments, imagesize, Jinja2, babel, six
+
+   .. tab:: Windows
+
+      .. code-block:: console
+
+         C:\> py -m pip show sphinx
+         Name: Sphinx
+         Version: 1.4.5
+         Summary: Python documentation generator
+         Home-page: http://sphinx-doc.org/
+         Author: Georg Brandl
+         Author-email: georg@python.org
+         License: BSD
+         Location: /my/env/lib/python2.7/site-packages
+         Requires: docutils, snowballstemmer, alabaster, Pygments, imagesize, Jinja2, babel, six
+
+#. Show all information about a package
+
+   .. tab:: Unix/macOS
+
+      .. code-block:: console
+
+         $ python -m pip show --verbose sphinx
+         Name: Sphinx
+         Version: 1.4.5
+         Summary: Python documentation generator
+         Home-page: http://sphinx-doc.org/
+         Author: Georg Brandl
+         Author-email: georg@python.org
+         License: BSD
+         Location: /my/env/lib/python2.7/site-packages
+         Requires: docutils, snowballstemmer, alabaster, Pygments, imagesize, Jinja2, babel, six
+         Metadata-Version: 2.0
+         Installer:
+         Classifiers:
+            Development Status :: 5 - Production/Stable
+            Environment :: Console
+            Environment :: Web Environment
+            Intended Audience :: Developers
+            Intended Audience :: Education
+            License :: OSI Approved :: BSD License
+            Operating System :: OS Independent
+            Programming Language :: Python
+            Programming Language :: Python :: 2
+            Programming Language :: Python :: 3
+            Framework :: Sphinx
+            Framework :: Sphinx :: Extension
+            Framework :: Sphinx :: Theme
+            Topic :: Documentation
+            Topic :: Documentation :: Sphinx
+            Topic :: Text Processing
+            Topic :: Utilities
+         Entry-points:
+            [console_scripts]
+            sphinx-apidoc = sphinx.apidoc:main
+            sphinx-autogen = sphinx.ext.autosummary.generate:main
+            sphinx-build = sphinx:main
+            sphinx-quickstart = sphinx.quickstart:main
+            [distutils.commands]
+            build_sphinx = sphinx.setup_command:BuildDoc
+
+   .. tab:: Windows
+
+      .. code-block:: console
+
+         C:\> py -m pip show --verbose sphinx
+         Name: Sphinx
+         Version: 1.4.5
+         Summary: Python documentation generator
+         Home-page: http://sphinx-doc.org/
+         Author: Georg Brandl
+         Author-email: georg@python.org
+         License: BSD
+         Location: /my/env/lib/python2.7/site-packages
+         Requires: docutils, snowballstemmer, alabaster, Pygments, imagesize, Jinja2, babel, six
+         Metadata-Version: 2.0
+         Installer:
+         Classifiers:
+            Development Status :: 5 - Production/Stable
+            Environment :: Console
+            Environment :: Web Environment
+            Intended Audience :: Developers
+            Intended Audience :: Education
+            License :: OSI Approved :: BSD License
+            Operating System :: OS Independent
+            Programming Language :: Python
+            Programming Language :: Python :: 2
+            Programming Language :: Python :: 3
+            Framework :: Sphinx
+            Framework :: Sphinx :: Extension
+            Framework :: Sphinx :: Theme
+            Topic :: Documentation
+            Topic :: Documentation :: Sphinx
+            Topic :: Text Processing
+            Topic :: Utilities
+         Entry-points:
+            [console_scripts]
+            sphinx-apidoc = sphinx.apidoc:main
+            sphinx-autogen = sphinx.ext.autosummary.generate:main
+            sphinx-build = sphinx:main
+            sphinx-quickstart = sphinx.quickstart:main
+            [distutils.commands]
+            build_sphinx = sphinx.setup_command:BuildDoc
diff --git a/docs/html/cli/pip_uninstall.rst b/docs/html/cli/pip_uninstall.rst
new file mode 100644
index 000000000..e6eeb5ebf
--- /dev/null
+++ b/docs/html/cli/pip_uninstall.rst
@@ -0,0 +1,58 @@
+.. _`pip uninstall`:
+
+=============
+pip uninstall
+=============
+
+
+
+Usage
+=====
+
+.. tab:: Unix/macOS
+
+   .. pip-command-usage:: uninstall "python -m pip"
+
+.. tab:: Windows
+
+   .. pip-command-usage:: uninstall "py -m pip"
+
+
+Description
+===========
+
+.. pip-command-description:: uninstall
+
+
+Options
+=======
+
+.. pip-command-options:: uninstall
+
+
+Examples
+========
+
+#. Uninstall a package.
+
+   .. tab:: Unix/macOS
+
+      .. code-block:: console
+
+         $ python -m pip uninstall simplejson
+         Uninstalling simplejson:
+            /home/me/env/lib/python3.9/site-packages/simplejson
+            /home/me/env/lib/python3.9/site-packages/simplejson-2.2.1-py3.9.egg-info
+         Proceed (y/n)? y
+            Successfully uninstalled simplejson
+
+   .. tab:: Windows
+
+      .. code-block:: console
+
+         C:\> py -m pip uninstall simplejson
+         Uninstalling simplejson:
+            /home/me/env/lib/python3.9/site-packages/simplejson
+            /home/me/env/lib/python3.9/site-packages/simplejson-2.2.1-py3.9.egg-info
+         Proceed (y/n)? y
+            Successfully uninstalled simplejson
diff --git a/docs/html/cli/pip_wheel.rst b/docs/html/cli/pip_wheel.rst
new file mode 100644
index 000000000..c2a9543fc
--- /dev/null
+++ b/docs/html/cli/pip_wheel.rst
@@ -0,0 +1,125 @@
+
+.. _`pip wheel`:
+
+=========
+pip wheel
+=========
+
+
+
+Usage
+=====
+
+.. tab:: Unix/macOS
+
+   .. pip-command-usage:: wheel "python -m pip"
+
+.. tab:: Windows
+
+   .. pip-command-usage:: wheel "py -m pip"
+
+
+Description
+===========
+
+.. pip-command-description:: wheel
+
+
+Build System Interface
+----------------------
+
+In order for pip to build a wheel, ``setup.py`` must implement the
+``bdist_wheel`` command with the following syntax:
+
+.. tab:: Unix/macOS
+
+   .. code-block:: shell
+
+      python setup.py bdist_wheel -d TARGET
+
+.. tab:: Windows
+
+   .. code-block:: shell
+
+      py setup.py bdist_wheel -d TARGET
+
+
+This command must create a wheel compatible with the invoking Python
+interpreter, and save that wheel in the directory TARGET.
+
+No other build system commands are invoked by the ``pip wheel`` command.
+
+Customising the build
+^^^^^^^^^^^^^^^^^^^^^
+
+It is possible using ``--global-option`` to include additional build commands
+with their arguments in the ``setup.py`` command. This is currently the only
+way to influence the building of C extensions from the command line. For
+example:
+
+.. tab:: Unix/macOS
+
+   .. code-block:: shell
+
+      python -m pip wheel --global-option bdist_ext --global-option -DFOO wheel
+
+.. tab:: Windows
+
+   .. code-block:: shell
+
+      py -m pip wheel --global-option bdist_ext --global-option -DFOO wheel
+
+
+will result in a build command of
+
+::
+
+    setup.py bdist_ext -DFOO bdist_wheel -d TARGET
+
+which passes a preprocessor symbol to the extension build.
+
+Such usage is considered highly build-system specific and more an accident of
+the current implementation than a supported interface.
+
+
+
+Options
+=======
+
+.. pip-command-options:: wheel
+
+.. pip-index-options:: wheel
+
+
+Examples
+========
+
+#. Build wheels for a requirement (and all its dependencies), and then install
+
+   .. tab:: Unix/macOS
+
+      .. code-block:: shell
+
+         python -m pip wheel --wheel-dir=/tmp/wheelhouse SomePackage
+         python -m pip install --no-index --find-links=/tmp/wheelhouse SomePackage
+
+   .. tab:: Windows
+
+      .. code-block:: shell
+
+         py -m pip wheel --wheel-dir=/tmp/wheelhouse SomePackage
+         py -m pip install --no-index --find-links=/tmp/wheelhouse SomePackage
+
+#. Build a wheel for a package from source
+
+   .. tab:: Unix/macOS
+
+      .. code-block:: shell
+
+         python -m pip wheel --no-binary SomePackage SomePackage
+
+   .. tab:: Windows
+
+      .. code-block:: shell
+
+         py -m pip wheel --no-binary SomePackage SomePackage
diff --git a/docs/html/conf.py b/docs/html/conf.py
index 2efb71358..2a4387a35 100644
--- a/docs/html/conf.py
+++ b/docs/html/conf.py
@@ -1,325 +1,128 @@
-# pip documentation build configuration file, created by
-# sphinx-quickstart on Tue Apr 22 22:08:49 2008
-#
-# This file is execfile()d with the current directory set to its containing dir
-#
-# Note that not all possible configuration values are present in this
-# autogenerated file.
-#
-# All configuration values have a default; values that are commented out
-# serve to show the default.
+"""Sphinx configuration file for pip's documentation."""
 
 import glob
 import os
 import pathlib
 import re
 import sys
+from typing import List, Tuple
 
-on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
-
+# Add the docs/ directory to sys.path, because pip_sphinxext.py is there.
 docs_dir = os.path.dirname(os.path.dirname(__file__))
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
 sys.path.insert(0, docs_dir)
-# sys.path.append(os.path.join(os.path.dirname(__file__), '../'))
 
-# -- General configuration ----------------------------------------------------
+# -- General configuration ------------------------------------------------------------
 
-# Add any Sphinx extension module names here, as strings. They can be
-# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
-# extensions = ['sphinx.ext.autodoc']
 extensions = [
-    # native:
-    'sphinx.ext.extlinks',
-    'sphinx.ext.intersphinx',
-    # third-party:
-    'sphinx_inline_tabs',
-    'sphinxcontrib.towncrier',
-    # in-tree:
-    'docs_feedback_sphinxext',
-    'pip_sphinxext',
+    # first-party extensions
+    "sphinx.ext.autodoc",
+    "sphinx.ext.todo",
+    "sphinx.ext.extlinks",
+    "sphinx.ext.intersphinx",
+    # our extensions
+    "pip_sphinxext",
+    # third-party extensions
+    "myst_parser",
+    "sphinx_copybutton",
+    "sphinx_inline_tabs",
+    "sphinxcontrib.towncrier",
 ]
 
-# intersphinx
-intersphinx_cache_limit = 0
-intersphinx_mapping = {
-    'pypug': ('https://packaging.python.org/', None),
-    'pypa': ('https://www.pypa.io/en/latest/', None),
-}
-
-
-# Add any paths that contain templates here, relative to this directory.
-templates_path = []
-
-# The suffix of source filenames.
-source_suffix = '.rst'
-
-# The encoding of source files.
-# source_encoding = 'utf-8'
-
-# The master toctree document.
-master_doc = 'index'
-
 # General information about the project.
-project = 'pip'
-copyright = '2008-2020, PyPA'
+project = "pip"
+copyright = "2008-2020, PyPA"
 
-# The version info for the project you're documenting, acts as replacement for
-# |version| and |release|, also used in various other places throughout the
-# built documents.
-#
-# The short X.Y version.
-
-version = release = 'dev'
-
-# Readthedocs seems to install pip as an egg (via setup.py install) which
-# is somehow resulting in "import pip" picking up an older copy of pip.
-# Rather than trying to force RTD to install pip properly, we'll simply
-# read the version direct from the __init__.py file. (Yes, this is
-# fragile, but it works...)
-
-pip_init = os.path.join(docs_dir, '..', 'src', 'pip', '__init__.py')
-with open(pip_init) as f:
+# Find the version and release information.
+# We have a single source of truth for our version number: pip's __init__.py file.
+# This next bit of code reads from it.
+file_with_version = os.path.join(docs_dir, "..", "src", "pip", "__init__.py")
+with open(file_with_version) as f:
     for line in f:
         m = re.match(r'__version__ = "(.*)"', line)
         if m:
             __version__ = m.group(1)
             # The short X.Y version.
-            version = '.'.join(__version__.split('.')[:2])
+            version = ".".join(__version__.split(".")[:2])
             # The full version, including alpha/beta/rc tags.
             release = __version__
             break
+    else:  # AKA no-break
+        version = release = "dev"
 
-# We have this here because readthedocs plays tricks sometimes and there seems
-# to be a heisenbug, related to the version of pip discovered. This is here to
-# help debug that if someone decides to do that in the future.
 print("pip version:", version)
 print("pip release:", release)
 
-# The language for content autogenerated by Sphinx. Refer to documentation
-# for a list of supported languages.
-# language = None
+# -- Options for smartquotes ----------------------------------------------------------
 
-# There are two options for replacing |today|: either, you set today to some
-# non-false value, then it is used:
-# today = ''
-# Else, today_fmt is used as the format for a strftime call.
-today_fmt = '%B %d, %Y'
-
-# List of documents that shouldn't be included in the build.
-# unused_docs = []
-
-# List of directories, relative to source directory, that shouldn't be searched
-# for source files.
-exclude_patterns = ['build/']
-
-# The reST default role (used for this markup: `text`) to use for all documents
-# default_role = None
-
-# If true, '()' will be appended to :func: etc. cross-reference text.
-# add_function_parentheses = True
-
-# If true, the current module name will be prepended to all description
-# unit titles (such as .. function::).
-# add_module_names = True
-
-# If true, sectionauthor and moduleauthor directives will be shown in the
-# output. They are ignored by default.
-# show_authors = False
-
-# A list of ignored prefixes for module index sorting.
-# modindex_common_prefix = []
-
-extlinks = {
-    'issue': ('https://github.com/pypa/pip/issues/%s', '#'),
-    'pull': ('https://github.com/pypa/pip/pull/%s', 'PR #'),
-    'pypi': ('https://pypi.org/project/%s/', ''),
-}
-
-# Turn off sphinx build warnings because of sphinx tabs during man pages build
-sphinx_tabs_nowarn = True
-
-# -- Options for HTML output --------------------------------------------------
-
-# The theme to use for HTML and HTML Help pages.  Major themes that come with
-# Sphinx are currently 'default' and 'sphinxdoc'.
-html_theme = "furo"
-
-# Theme options are theme-specific and customize the look and feel of a theme
-# further.  For a list of options available for each theme, see the
-# documentation.
-html_theme_options = {}
-
-# Add any paths that contain custom themes here, relative to this directory.
-
-# The name for this set of Sphinx documents.  If None, it defaults to
-# " v documentation".
-html_title = f"{project} documentation v{release}"
-
-# A shorter title for the navigation bar.  Default is the same as html_title.
-# html_short_title = None
-
-# The name of an image file (relative to this directory) to place at the top
-# of the sidebar.
-# html_logo = '_static/piplogo.png'
-
-# The name of an image file (within the static path) to use as favicon of the
-# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
-# pixels large.
-# html_favicon = 'favicon.png'
-
-# Add any paths that contain custom static files (such as style sheets) here,
-# relative to this directory. They are copied after the builtin static files,
-# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = []
-
-# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
-# using the given strftime format.
-html_last_updated_fmt = '%b %d, %Y'
-
-# If true, the Docutils Smart Quotes transform (originally based on
-# SmartyPants) will be used to convert characters like quotes and dashes
-# to typographically correct entities.  The default is True.
-smartquotes = True
-
-# This string, for use with Docutils 0.14 or later, customizes the
-# SmartQuotes transform. The default of "qDe" converts normal quote
-# characters ('"' and "'"), en and em dashes ("--" and "---"), and
-# ellipses "...".
-#    For now, we disable the conversion of dashes so that long options
-# like "--find-links" won't render as "-find-links" if included in the
-# text in places where monospaced type can't be used. For example, backticks
-# can't be used inside roles like :ref:`--no-index <--no-index>` because
-# of nesting.
+# The default of "qDe" converts normal quote characters ('"' and "'"), en and em
+# dashes ("--" and "---"), and ellipses "...". Disable the conversion of dashes so
+# that long options like "--find-links" won't render as "-find-links" in the text.
 smartquotes_action = "qe"
 
-# Custom sidebar templates, maps document names to template names.
-html_sidebars = {}
+# -- Options for intersphinx ----------------------------------------------------------
 
-# Additional templates that should be rendered to pages, maps page names to
-# template names.
-# html_additional_pages = {}
-
-# If false, no module index is generated.
-html_use_modindex = False
-
-# If false, no index is generated.
-html_use_index = False
-
-# If true, the index is split into individual pages for each letter.
-# html_split_index = False
-
-# If true, links to the reST sources are added to the pages.
-html_show_sourcelink = False
-
-# If true, an OpenSearch description file will be output, and all pages will
-# contain a  tag referring to it.  The value of this option must be the
-# base URL from which the finished HTML is served.
-# html_use_opensearch = ''
-
-# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
-# html_file_suffix = ''
-
-# Output file base name for HTML help builder.
-htmlhelp_basename = 'pipdocs'
-
-
-# -- Options for LaTeX output -------------------------------------------------
-
-# The paper size ('letter' or 'a4').
-# latex_paper_size = 'letter'
-
-# The font size ('10pt', '11pt' or '12pt').
-# latex_font_size = '10pt'
-
-# Grouping the document tree into LaTeX files. List of tuples
-# (source start file, target name, title, author, documentclass [howto/manual])
-latex_documents = [
-    (
-        'index',
-        'pip.tex',
-        'pip Documentation',
-        'pip developers',
-        'manual',
-    ),
-]
-
-# The name of an image file (relative to this directory) to place at the top of
-# the title page.
-# latex_logo = None
-
-# For "manual" documents, if this is true, then toplevel headings are parts,
-# not chapters.
-# latex_use_parts = False
-
-# Additional stuff for the LaTeX preamble.
-# latex_preamble = ''
-
-# Documents to append as an appendix to all manuals.
-# latex_appendices = []
-
-# If false, no module index is generated.
-# latex_use_modindex = True
-
-# -- Options for Manual Pages -------------------------------------------------
-
-# List of manual pages generated
-man_pages = [
-    (
-        'index',
-        'pip',
-        'package manager for Python packages',
-        'pip developers',
-        1
-    )
-]
-
-
-def to_document_name(path, base_dir):
-    """Convert a provided path to a Sphinx "document name".
-    """
-    relative_path = os.path.relpath(path, base_dir)
-    root, _ = os.path.splitext(relative_path)
-    return root.replace(os.sep, '/')
-
-
-# Here, we crawl the entire man/commands/ directory and list every file with
-# appropriate name and details
-man_dir = os.path.join(docs_dir, 'man')
-raw_subcommands = glob.glob(os.path.join(man_dir, 'commands/*.rst'))
-if not raw_subcommands:
-    raise FileNotFoundError(
-        'The individual subcommand manpages could not be found!'
-    )
-for fname in raw_subcommands:
-    fname_base = to_document_name(fname, man_dir)
-    outname = 'pip-' + fname_base.split('/')[1]
-    description = 'description of {} command'.format(
-        outname.replace('-', ' ')
-    )
-
-    man_pages.append((fname_base, outname, description, 'pip developers', 1))
-
-# -- Options for docs_feedback_sphinxext --------------------------------------
-
-# NOTE: Must be one of 'attention', 'caution', 'danger', 'error', 'hint',
-# NOTE: 'important', 'note', 'tip', 'warning' or 'admonition'.
-docs_feedback_admonition_type = 'important'
-docs_feedback_big_doc_lines = 50  # bigger docs will have a banner on top
-docs_feedback_email = 'Docs UX Team '
-docs_feedback_excluded_documents = {  # these won't have any banners
-    'news', 'reference/index',
+intersphinx_mapping = {
+    "python": ("https://docs.python.org/3", None),
+    "pypug": ("https://packaging.python.org", None),
 }
-docs_feedback_questions_list = (
-    'What problem were you trying to solve when you came to this page?',
-    'What content was useful?',
-    'What content was not useful?',
-)
 
-# -- Options for towncrier_draft extension -----------------------------------
+# -- Options for extlinks -------------------------------------------------------------
 
-towncrier_draft_autoversion_mode = 'draft'  # or: 'sphinx-release', 'sphinx-version'
-towncrier_draft_include_empty = False
+extlinks = {
+    "issue": ("https://github.com/pypa/pip/issues/%s", "#"),
+    "pull": ("https://github.com/pypa/pip/pull/%s", "PR #"),
+    "pypi": ("https://pypi.org/project/%s/", ""),
+}
+
+# -- Options for towncrier_draft extension --------------------------------------------
+
+towncrier_draft_autoversion_mode = "draft"  # or: 'sphinx-release', 'sphinx-version'
+towncrier_draft_include_empty = True
 towncrier_draft_working_directory = pathlib.Path(docs_dir).parent
 # Not yet supported: towncrier_draft_config_path = 'pyproject.toml'  # relative to cwd
+
+# -- Options for HTML -----------------------------------------------------------------
+
+html_theme = "furo"
+html_title = f"{project} documentation v{release}"
+
+# Disable the generation of the various indexes
+html_use_modindex = False
+html_use_index = False
+
+# -- Options for Manual Pages ---------------------------------------------------------
+
+
+# List of manual pages generated
+def determine_man_pages() -> List[Tuple[str, str, str, str, int]]:
+    """Determine which man pages need to be generated."""
+
+    def to_document_name(path: str, base_dir: str) -> str:
+        """Convert a provided path to a Sphinx "document name"."""
+        relative_path = os.path.relpath(path, base_dir)
+        root, _ = os.path.splitext(relative_path)
+        return root.replace(os.sep, "/")
+
+    # Crawl the entire man/commands/ directory and list every file with appropriate
+    # name and details.
+    man_dir = os.path.join(docs_dir, "man")
+    raw_subcommands = glob.glob(os.path.join(man_dir, "commands/*.rst"))
+    if not raw_subcommands:
+        raise FileNotFoundError(
+            "The individual subcommand manpages could not be found!"
+        )
+
+    retval = [
+        ("index", "pip", "package manager for Python packages", "pip developers", 1),
+    ]
+    for fname in raw_subcommands:
+        fname_base = to_document_name(fname, man_dir)
+        outname = "pip-" + fname_base.split("/")[1]
+        description = "description of {} command".format(outname.replace("-", " "))
+
+        retval.append((fname_base, outname, description, "pip developers", 1))
+
+    return retval
+
+
+man_pages = determine_man_pages()
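
For illustration only, here is a self-contained sketch of the version-parsing loop added to ``conf.py`` above. The ``sample`` string stands in for pip's ``src/pip/__init__.py``; the version value in it is hypothetical, not pip's real one.

.. code-block:: python

   import re

   # Hypothetical stand-in for the contents of src/pip/__init__.py.
   sample = '__version__ = "21.1.dev0"\n'

   for line in sample.splitlines():
       m = re.match(r'__version__ = "(.*)"', line)
       if m:
           __version__ = m.group(1)
           # The short X.Y version.
           version = ".".join(__version__.split(".")[:2])
           # The full version, including alpha/beta/rc tags.
           release = __version__
           break
   else:  # no __version__ line was found
       version = release = "dev"

   print(version, release)  # -> 21.1 21.1.dev0
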
diff --git a/docs/html/copyright.rst b/docs/html/copyright.rst
index fd0212f53..0e2ede5ee 100644
--- a/docs/html/copyright.rst
+++ b/docs/html/copyright.rst
@@ -6,4 +6,4 @@ Copyright
 
 pip and this documentation is:
 
-Copyright © 2008-2020 The pip developers (see `AUTHORS.txt `_ file). All rights reserved.
+Copyright © 2008-2020 The pip developers (see `AUTHORS.txt `_ file). All rights reserved.
diff --git a/docs/html/development/architecture/anatomy.rst b/docs/html/development/architecture/anatomy.rst
index 46bba4489..4d58b4cff 100644
--- a/docs/html/development/architecture/anatomy.rst
+++ b/docs/html/development/architecture/anatomy.rst
@@ -51,7 +51,6 @@ The ``README``, license, ``pyproject.toml``, ``setup.py``, and so on are in the
   * ``functional/`` *[functional tests of pip’s CLI -- end-to-end, invoke pip in subprocess & check results of execution against desired result. This also is what makes test suite slow]*
   * ``lib/`` *[helpers for tests]*
   * ``unit/`` *[unit tests -- fast and small and nice!]*
-  * ``yaml/`` *[resolver tests! They’re written in YAML. This folder just contains .yaml files -- actual code for reading/running them is in lib/yaml.py . This is fine!]*
 
 * ``tools`` *[misc development workflow tools, like requirements files & Travis CI files & helpers for tox]*
 * ``.azure-pipelines``
@@ -105,5 +104,5 @@ Within ``src/``:
 
 .. _`tracking issue`: https://github.com/pypa/pip/issues/6831
 .. _GitHub repository: https://github.com/pypa/pip/
-.. _tox.ini: https://github.com/pypa/pip/blob/master/tox.ini
+.. _tox.ini: https://github.com/pypa/pip/blob/main/tox.ini
 .. _improving the pip dependency resolver: https://github.com/pypa/pip/issues/988
diff --git a/docs/html/development/architecture/upgrade-options.rst b/docs/html/development/architecture/upgrade-options.rst
index 6196413ef..76c7d1fc0 100644
--- a/docs/html/development/architecture/upgrade-options.rst
+++ b/docs/html/development/architecture/upgrade-options.rst
@@ -30,7 +30,8 @@ candidate.
 ``--upgrade-strategy``
 
 This option affects which packages are allowed to be installed. It is only
-relevant if ``--upgrade`` is specified. The base behaviour is to allow
+relevant if ``--upgrade`` is specified (except for the ``to-satisfy-only``
+option mentioned below). The base behaviour is to allow
 packages specified on pip's command line to be upgraded. This option controls
 what *other* packages can be upgraded:
 
@@ -43,9 +44,15 @@ what *other* packages can be upgraded:
   pip command or a requirement file (i.e, they are direct requirements), or
   an upgraded parent needs a later version of the dependency than is
   currently installed.
-* ``to-satisfy-only`` (**undocumented**) - packages are not upgraded (not
-  even direct requirements) unless the currently installed version fails to
-  satisfy a requirement (either explicitly specified or a dependency).
+* ``to-satisfy-only`` (**undocumented, please avoid**) - packages are not
+  upgraded (not even direct requirements) unless the currently installed
+  version fails to satisfy a requirement (either explicitly specified or a
+  dependency).
+
+  * This is actually the "default" upgrade strategy when ``--upgrade`` is
+    *not set*, i.e. ``pip install AlreadyInstalled`` and
+    ``pip install --upgrade --upgrade-strategy=to-satisfy-only AlreadyInstalled``
+    yield the same behavior.
 
 ``--force-reinstall``
 
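
To make the ``to-satisfy-only`` note above concrete, here is a rough console sketch; the package name is reused from the bullet and the output lines are illustrative, not captured from a real run.

.. code-block:: console

   $ python -m pip install AlreadyInstalled
   Requirement already satisfied: AlreadyInstalled in ./venv/lib/python3.8/site-packages
   $ python -m pip install --upgrade --upgrade-strategy=to-satisfy-only AlreadyInstalled
   Requirement already satisfied: AlreadyInstalled in ./venv/lib/python3.8/site-packages
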
diff --git a/docs/html/development/ci.rst b/docs/html/development/ci.rst
index 5c33231b1..ac51e9ffa 100644
--- a/docs/html/development/ci.rst
+++ b/docs/html/development/ci.rst
@@ -1,7 +1,8 @@
 .. note::
 
-    This section of the documentation is currently being written. pip
-    developers welcome your help to complete this documentation. If
+    This section of the documentation is currently out of date.
+
+    pip developers welcome your help to update this documentation. If
     you're interested in helping out, please let us know in the
     `tracking issue`_, or just submit a pull request and mention it in
     that tracking issue.
@@ -133,11 +134,11 @@ Actual testing
 |           |          +-------+---------------+-----------------+
 |           |          | PyPy3 |               |                 |
 |   MacOS   +----------+-------+---------------+-----------------+
-|           |          | CP3.6 |   Azure       |   Azure         |
+|           |          | CP3.6 |   GitHub      |   GitHub        |
 |           |          +-------+---------------+-----------------+
-|           |   x64    | CP3.7 |   Azure       |   Azure         |
+|           |   x64    | CP3.7 |   GitHub      |   GitHub        |
 |           |          +-------+---------------+-----------------+
-|           |          | CP3.8 |   Azure       |   Azure         |
+|           |          | CP3.8 |   GitHub      |   GitHub        |
 |           |          +-------+---------------+-----------------+
 |           |          | PyPy3 |               |                 |
 +-----------+----------+-------+---------------+-----------------+
diff --git a/docs/html/development/contributing.rst b/docs/html/development/contributing.rst
index 63eb4c33e..7d2e64902 100644
--- a/docs/html/development/contributing.rst
+++ b/docs/html/development/contributing.rst
@@ -11,7 +11,7 @@ We have an in-progress guide to the
 Submitting Pull Requests
 ========================
 
-Submit pull requests against the ``master`` branch, providing a good
+Submit pull requests against the ``main`` branch, providing a good
 description of what you're doing and why. You must have legal permission to
 distribute any code you contribute to pip and it must be available under the
 MIT License.
@@ -39,7 +39,7 @@ separately, as a "formatting cleanup" PR, if needed.
 Automated Testing
 =================
 
-All pull requests and merges to 'master' branch are tested using `Travis CI`_,
+All pull requests and merges to the ``main`` branch are tested using `Travis CI`_,
 `Azure Pipelines`_ and `GitHub Actions`_ based on our `.travis.yml`_,
 `.azure-pipelines`_ and `.github/workflows`_ files. More details about pip's
 Continuous Integration can be found in the `CI Documentation`_
@@ -131,8 +131,8 @@ updating deprecation policy, etc.
 Updating your branch
 ====================
 
-As you work, you might need to update your local master branch up-to-date with
-the ``master`` branch in the main pip repository, which moves forward as the
+As you work, you might need to keep your local ``main`` branch up-to-date with
+the ``main`` branch in the main pip repository, which moves forward as the
 maintainers merge pull requests. Most people working on the project use the
 following workflow.
 
@@ -160,24 +160,24 @@ First, fetch the latest changes from the main pip repository, ``upstream``:
 
     git fetch upstream
 
-Then, check out your local ``master`` branch, and rebase the changes on top of
+Then, check out your local ``main`` branch, and rebase the changes on top of
 it:
 
 .. code-block:: console
 
-    git checkout master
-    git rebase upstream/master
+    git checkout main
+    git rebase upstream/main
 
 At this point, you might have to `resolve merge conflicts`_. Once this is done,
-push the updates you have just made to your local ``master`` branch to your
+push the updates you have just made to your local ``main`` branch to your
 ``origin`` repository on GitHub:
 
 .. code-block:: console
 
-    git checkout master
-    git push origin master
+    git checkout main
+    git push origin main
 
-Now your local ``master`` branch and the ``master`` branch in your ``origin``
+Now your local ``main`` branch and the ``main`` branch in your ``origin``
 repo have been updated with the most recent changes from the main pip
 repository.
 
@@ -187,10 +187,10 @@ To keep your branches updated, the process is similar:
 
     git checkout awesome-feature
     git fetch upstream
-    git rebase upstream/master
+    git rebase upstream/main
 
 Now your branch has been updated with the latest changes from the
-``master`` branch on the upstream pip repository.
+``main`` branch on the upstream pip repository.
 
 It's good practice to back up your branches by pushing them to your
 ``origin`` on GitHub as you are working on them. To push a branch,
@@ -230,7 +230,7 @@ If you get an error message like this:
 
 Try force-pushing your branch with ``push -f``.
 
-The ``master`` branch in the main pip repository gets updated frequently, so
+The ``main`` branch in the main pip repository gets updated frequently, so
 you might have to update your branch at least once while you are working on it.
 
 Thank you for your contribution!
@@ -267,9 +267,9 @@ will initiate a vote among the existing maintainers.
 .. _`Travis CI`: https://travis-ci.org/
 .. _`Azure Pipelines`: https://azure.microsoft.com/en-in/services/devops/pipelines/
 .. _`GitHub Actions`: https://github.com/features/actions
-.. _`.travis.yml`: https://github.com/pypa/pip/blob/master/.travis.yml
-.. _`.azure-pipelines`: https://github.com/pypa/pip/blob/master/.azure-pipelines
-.. _`.github/workflows`: https://github.com/pypa/pip/blob/master/.github/workflows
+.. _`.travis.yml`: https://github.com/pypa/pip/blob/main/.travis.yml
+.. _`.azure-pipelines`: https://github.com/pypa/pip/blob/main/.azure-pipelines
+.. _`.github/workflows`: https://github.com/pypa/pip/blob/main/.github/workflows
 .. _`CI Documentation`: https://pip.pypa.io/en/latest/development/ci/
 .. _`towncrier`: https://pypi.org/project/towncrier/
 .. _`Testing the next-gen pip dependency resolver`: https://pradyunsg.me/blog/2020/03/27/pip-resolver-testing/
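
For completeness, the force-push suggested in the hunk above would look roughly like this, reusing the ``awesome-feature`` branch name from the earlier example in this file (a sketch, not part of the original page):

.. code-block:: console

   $ git push -f origin awesome-feature
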
diff --git a/docs/html/development/issue-triage.rst b/docs/html/development/issue-triage.rst
index 9b5e5cc1c..c21da1fc6 100644
--- a/docs/html/development/issue-triage.rst
+++ b/docs/html/development/issue-triage.rst
@@ -229,7 +229,7 @@ Examples:
   (`link `__)
 - get-pip on system with no ``/usr/lib64``
   (`link `__)
-- reproducing with ``pip`` from master branch
+- reproducing with ``pip`` from the current development branch
   (`link `__)
 
 
@@ -285,7 +285,7 @@ An issue may be considered resolved and closed when:
     - already tracked by another issue
 
   - A project-specific issue has been identified and the issue no
-    longer occurs as of the latest commit on the master branch.
+    longer occurs as of the latest commit on the main branch.
 
 - An enhancement or feature request no longer has a proponent and the maintainers
   don't think it's worth keeping open.
diff --git a/docs/html/development/release-process.rst b/docs/html/development/release-process.rst
index a133e57f2..ee1595cec 100644
--- a/docs/html/development/release-process.rst
+++ b/docs/html/development/release-process.rst
@@ -7,7 +7,7 @@ Release process
 Release Cadence
 ===============
 
-The pip project has a release cadence of releasing whatever is on ``master``
+The pip project has a release cadence of releasing whatever is on ``main``
 every 3 months. This gives users a predictable pattern for when releases
 are going to happen and prevents locking up improvements for fixes for long
 periods of time, while still preventing massively fracturing the user base
@@ -22,8 +22,8 @@ The release manager may, at their discretion, choose whether or not there
 will be a pre-release period for a release, and if there is may extend that
 period into the next month if needed.
 
-Because releases are made direct from the ``master`` branch, it is essential
-that ``master`` is always in a releasable state. It is acceptable to merge
+Because releases are made direct from the ``main`` branch, it is essential
+that ``main`` is always in a releasable state. It is acceptable to merge
 PRs that partially implement a new feature, but only if the partially
 implemented version is usable in that state (for example, with reduced
 functionality or disabled by default). In the case where a merged PR is found
@@ -116,13 +116,13 @@ Release Process
 Creating a new release
 ----------------------
 
-#. Checkout the current pip ``master`` branch.
+#. Check out the current pip ``main`` branch.
 #. Ensure you have the latest ``nox`` installed.
 #. Prepare for release using ``nox -s prepare-release -- YY.N``.
    This will update the relevant files and tag the correct commit.
 #. Build the release artifacts using ``nox -s build-release -- YY.N``.
    This will checkout the tag, generate the distribution files to be
-   uploaded and checkout the master branch again.
+   uploaded and checkout the main branch again.
 #. Upload the release to PyPI using ``nox -s upload-release -- YY.N``.
 #. Push all of the changes including the tag.
 #. Regenerate the ``get-pip.py`` script in the `get-pip repository`_ (as
@@ -155,20 +155,20 @@ Creating a bug-fix release
 
 Sometimes we need to release a bugfix release of the form ``YY.N.Z+1``. In
 order to create one of these the changes should already be merged into the
-``master`` branch.
+``main`` branch.
 
 #. Create a new ``release/YY.N.Z+1`` branch off of the ``YY.N`` tag using the
    command ``git checkout -b release/YY.N.Z+1 YY.N``.
-#. Cherry pick the fixed commits off of the ``master`` branch, fixing any
+#. Cherry pick the fixed commits off of the ``main`` branch, fixing any
    conflicts.
 #. Run ``nox -s prepare-release -- YY.N.Z+1``.
-#. Merge master into your release branch and drop the news files that have been
+#. Merge ``main`` into your release branch and drop the news files that have been
    included in your release (otherwise they would also appear in the ``YY.N+1``
    changelog)
 #. Push the ``release/YY.N.Z+1`` branch to github and submit a PR for it against
-   the ``master`` branch and wait for the tests to run.
-#. Once tests run, merge the ``release/YY.N.Z+1`` branch into master, and follow
-   the above release process starting with step 4.
+   the ``main`` branch and wait for the tests to run.
+#. Once tests run, merge the ``release/YY.N.Z+1`` branch into ``main``, and
+   follow the above release process starting with step 4.
 
 .. _`get-pip repository`: https://github.com/pypa/get-pip
 .. _`psf-salt repository`: https://github.com/python/psf-salt
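
As a compressed illustration of the release steps above, the command sequence looks roughly like the following; ``21.1`` is a hypothetical stand-in for ``YY.N``, and the remote name ``upstream`` and tag name are assumptions.

.. code-block:: console

   $ git checkout main
   $ nox -s prepare-release -- 21.1
   $ nox -s build-release -- 21.1
   $ nox -s upload-release -- 21.1
   # Push the release commits and the new tag (remote and tag names assumed).
   $ git push upstream main 21.1
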
diff --git a/docs/html/index.md b/docs/html/index.md
new file mode 100644
index 000000000..a84c2665d
--- /dev/null
+++ b/docs/html/index.md
@@ -0,0 +1,48 @@
+---
+hide-toc: true
+---
+
+# pip
+
+pip is the [package installer for Python][recommended]. You can use it to
+install packages from the [Python Package Index][pypi] and other indexes.
+
+```{toctree}
+:hidden:
+
+quickstart
+installing
+user_guide
+cli/index
+```
+
+```{toctree}
+:caption: Project
+:hidden:
+
+development/index
+ux_research_design
+news
+Code of Conduct 
+GitHub 
+```
+
+If you want to learn about how to use pip, check out the following resources:
+
+- [Quickstart](quickstart)
+- [Python Packaging User Guide](https://packaging.python.org)
+
+If you find bugs, need help, or want to talk to the developers, use our mailing
+lists or chat rooms:
+
+- [GitHub Issues][issue-tracker]
+- [Discourse channel][packaging-discourse]
+- [User IRC][irc-pypa]
+- [Development IRC][irc-pypa-dev]
+
+[recommended]: https://packaging.python.org/guides/tool-recommendations/
+[pypi]: https://pypi.org/
+[issue-tracker]: https://github.com/pypa/pip/issues/
+[packaging-discourse]: https://discuss.python.org/c/packaging/14
+[irc-pypa]: https://webchat.freenode.net/#pypa
+[irc-pypa-dev]: https://webchat.freenode.net/#pypa-dev
diff --git a/docs/html/index.rst b/docs/html/index.rst
deleted file mode 100644
index b92a23e02..000000000
--- a/docs/html/index.rst
+++ /dev/null
@@ -1,63 +0,0 @@
-==================================
-pip - The Python Package Installer
-==================================
-
-pip is the `package installer`_ for Python. You can use pip to install packages from the `Python Package Index`_ and other indexes.
-
-Please take a look at our documentation for how to install and use pip:
-
-.. toctree::
-   :maxdepth: 1
-
-   quickstart
-   installing
-   user_guide
-   reference/index
-   development/index
-   ux_research_design
-   news
-
-.. warning::
-
-   In pip 20.3, we've `made a big improvement to the heart of pip`_;
-   :ref:`Resolver changes 2020`. We want your input, so `sign up for
-   our user experience research studies`_ to help us do it right.
-
-.. warning::
-
-   pip 21.0, in January 2021, removed Python 2 support, per pip's
-   :ref:`Python 2 Support` policy. Please migrate to Python 3.
-
-If you find bugs, need help, or want to talk to the developers, please use our mailing lists or chat rooms:
-
-* `Issue tracking`_
-* `Discourse channel`_
-* `User IRC`_
-
-If you want to get involved, head over to GitHub to get the source code, and feel free to jump on the developer mailing lists and chat rooms:
-
-* `GitHub page`_
-* `Development mailing list`_
-* `Development IRC`_
-
-
-Code of Conduct
-===============
-
-Everyone interacting in the pip project's codebases, issue trackers, chat
-rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_.
-
-.. _package installer: https://packaging.python.org/guides/tool-recommendations/
-.. _Python Package Index: https://pypi.org
-.. _made a big improvement to the heart of pip: https://pyfound.blogspot.com/2020/11/pip-20-3-new-resolver.html
-.. _sign up for our user experience research studies: https://pyfound.blogspot.com/2020/03/new-pip-resolver-to-roll-out-this-year.html
-.. _Installation: https://pip.pypa.io/en/stable/installing.html
-.. _Documentation: https://pip.pypa.io/en/stable/
-.. _Changelog: https://pip.pypa.io/en/stable/news.html
-.. _GitHub page: https://github.com/pypa/pip
-.. _Issue tracking: https://github.com/pypa/pip/issues
-.. _Discourse channel: https://discuss.python.org/c/packaging
-.. _Development mailing list: https://mail.python.org/mailman3/lists/distutils-sig.python.org/
-.. _User IRC: https://webchat.freenode.net/?channels=%23pypa
-.. _Development IRC: https://webchat.freenode.net/?channels=%23pypa-dev
-.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md
diff --git a/docs/html/news.rst b/docs/html/news.rst
index 8b54a02e6..829e6b74f 100644
--- a/docs/html/news.rst
+++ b/docs/html/news.rst
@@ -9,4 +9,4 @@ Changelog
 
 .. towncrier-draft-entries:: |release|, unreleased as on
 
-.. include:: ../../NEWS.rst
+.. pip-news-include:: ../../NEWS.rst
diff --git a/docs/html/reference/index.rst b/docs/html/reference/index.rst
index d21b7a980..5e81105c9 100644
--- a/docs/html/reference/index.rst
+++ b/docs/html/reference/index.rst
@@ -1,21 +1,11 @@
-===============
-Reference Guide
-===============
+:orphan:
 
-.. toctree::
-   :maxdepth: 2
+.. meta::
 
-   pip
-   pip_install
-   pip_download
-   pip_uninstall
-   pip_freeze
-   pip_list
-   pip_show
-   pip_search
-   pip_cache
-   pip_check
-   pip_config
-   pip_wheel
-   pip_hash
-   pip_debug
+  :http-equiv=refresh: 3; url=../cli/
+
+This page has moved
+===================
+
+You should be redirected automatically in 3 seconds. If that didn't
+work, here's a link: :doc:`../cli/index`
diff --git a/docs/html/reference/pip.rst b/docs/html/reference/pip.rst
index 1f52630f6..53b1c9e0d 100644
--- a/docs/html/reference/pip.rst
+++ b/docs/html/reference/pip.rst
@@ -1,255 +1,11 @@
-===
-pip
-===
+:orphan:
 
+.. meta::
 
-Usage
-*****
+  :http-equiv=refresh: 3; url=../../cli/pip/
 
-.. tab:: Unix/macOS
+This page has moved
+===================
 
-    .. code-block:: shell
-
-        python -m pip  [options]
-
-.. tab:: Windows
-
-    .. code-block:: shell
-
-        py -m pip  [options]
-
-Description
-***********
-
-
-.. _`Logging`:
-
-
-Logging
-=======
-
-Console logging
-~~~~~~~~~~~~~~~
-
-pip offers :ref:`-v, --verbose <--verbose>` and :ref:`-q, --quiet <--quiet>`
-to control the console log level. By default, some messages (error and warnings)
-are colored in the terminal. If you want to suppress the colored output use
-:ref:`--no-color <--no-color>`.
-
-
-.. _`FileLogging`:
-
-File logging
-~~~~~~~~~~~~
-
-pip offers the :ref:`--log <--log>` option for specifying a file where a maximum
-verbosity log will be kept.  This option is empty by default. This log appends
-to previous logging.
-
-Like all pip options, ``--log`` can also be set as an environment variable, or
-placed into the pip config file.  See the :ref:`Configuration` section.
-
-.. _`exists-action`:
-
---exists-action option
-======================
-
-This option specifies default behavior when path already exists.
-Possible cases: downloading files or checking out repositories for installation,
-creating archives. If ``--exists-action`` is not defined, pip will prompt
-when decision is needed.
-
-*(s)witch*
-    Only relevant to VCS checkout. Attempt to switch the checkout
-    to the appropriate URL and/or revision.
-*(i)gnore*
-    Abort current operation (e.g. don't copy file, don't create archive,
-    don't modify a checkout).
-*(w)ipe*
-    Delete the file or VCS checkout before trying to create, download, or checkout a new one.
-*(b)ackup*
-    Rename the file or checkout to ``{name}{'.bak' * n}``, where n is some number
-    of ``.bak`` extensions, such that the file didn't exist at some point.
-    So the most recent backup will be the one with the largest number after ``.bak``.
-*(a)abort*
-    Abort pip and return non-zero exit status.
-
-.. _`build-interface`:
-
-
-Build System Interface
-======================
-
-pip builds packages by invoking the build system. By default, builds will use
-``setuptools``, but if a project specifies a different build system using a
-``pyproject.toml`` file, as per :pep:`517`, pip will use that instead.  As well
-as package building, the build system is also invoked to install packages
-direct from source.  This is handled by invoking the build system to build a
-wheel, and then installing from that wheel.  The built wheel is cached locally
-by pip to avoid repeated identical builds.
-
-The current interface to the build system is via the ``setup.py`` command line
-script - all build actions are defined in terms of the specific ``setup.py``
-command line that will be run to invoke the required action.
-
-Setuptools Injection
-~~~~~~~~~~~~~~~~~~~~
-
-When :pep:`517` is not used, the supported build system is ``setuptools``.
-However, not all packages use ``setuptools`` in their build scripts. To support
-projects that use "pure ``distutils``", pip injects ``setuptools`` into
-``sys.modules`` before invoking ``setup.py``. The injection should be
-transparent to ``distutils``-based projects, but 3rd party build tools wishing
-to provide a ``setup.py`` emulating the commands pip requires may need to be
-aware that it takes place.
-
-Projects using :pep:`517` *must* explicitly use setuptools - pip does not do
-the above injection process in this case.
-
-Build System Output
-~~~~~~~~~~~~~~~~~~~
-
-Any output produced by the build system will be read by pip (for display to the
-user if requested). In order to correctly read the build system output, pip
-requires that the output is written in a well-defined encoding, specifically
-the encoding the user has configured for text output (which can be obtained in
-Python using ``locale.getpreferredencoding``). If the configured encoding is
-ASCII, pip assumes UTF-8 (to account for the behaviour of some Unix systems).
-
-Build systems should ensure that any tools they invoke (compilers, etc) produce
-output in the correct encoding. In practice - and in particular on Windows,
-where tools are inconsistent in their use of the "OEM" and "ANSI" codepages -
-this may not always be possible. pip will therefore attempt to recover cleanly
-if presented with incorrectly encoded build tool output, by translating
-unexpected byte sequences to Python-style hexadecimal escape sequences
-(``"\x80\xff"``, etc). However, it is still possible for output to be displayed
-using an incorrect encoding (mojibake).
-
-Under :pep:`517`, handling of build tool output is the backend's responsibility,
-and pip simply displays the output produced by the backend. (Backends, however,
-will likely still have to address the issues described above).
-
-PEP 517 and 518 Support
-~~~~~~~~~~~~~~~~~~~~~~~
-
-As of version 10.0, pip supports projects declaring dependencies that are
-required at install time using a ``pyproject.toml`` file, in the form described
-in :pep:`518`. When building a project, pip will install the required
-dependencies locally, and make them available to the build process.
-Furthermore, from version 19.0 onwards, pip supports projects specifying the
-build backend they use in ``pyproject.toml``, in the form described in
-:pep:`517`.
-
-When making build requirements available, pip does so in an *isolated
-environment*. That is, pip does not install those requirements into the user's
-``site-packages``, but rather installs them in a temporary directory which it
-adds to the user's ``sys.path`` for the duration of the build. This ensures
-that build requirements are handled independently of the user's runtime
-environment. For example, a project that needs a recent version of setuptools
-to build can still be installed, even if the user has an older version
-installed (and without silently replacing that version).
-
-In certain cases, projects (or redistributors) may have workflows that
-explicitly manage the build environment. For such workflows, build isolation
-can be problematic. If this is the case, pip provides a
-``--no-build-isolation`` flag to disable build isolation. Users supplying this
-flag are responsible for ensuring the build environment is managed
-appropriately (including ensuring that all required build dependencies are
-installed).
-
-By default, pip will continue to use the legacy (direct ``setup.py`` execution
-based) build processing for projects that do not have a ``pyproject.toml`` file.
-Projects with a ``pyproject.toml`` file will use a :pep:`517` backend. Projects
-with a ``pyproject.toml`` file, but which don't have a ``build-system`` section,
-will be assumed to have the following backend settings::
-
-    [build-system]
-    requires = ["setuptools>=40.8.0", "wheel"]
-    build-backend = "setuptools.build_meta:__legacy__"
-
-.. note::
-
-    ``setuptools`` 40.8.0 is the first version of setuptools that offers a
-    :pep:`517` backend that closely mimics directly executing ``setup.py``.
-
-If a project has ``[build-system]``, but no ``build-backend``, pip will also use
-``setuptools.build_meta:__legacy__``, but will expect the project requirements
-to include ``setuptools`` and ``wheel`` (and will report an error if the
-installed version of ``setuptools`` is not recent enough).
-
-If a user wants to explicitly request :pep:`517` handling even though a project
-doesn't have a ``pyproject.toml`` file, this can be done using the
-``--use-pep517`` command line option. Similarly, to request legacy processing
-even though ``pyproject.toml`` is present, the ``--no-use-pep517`` option is
-available (although obviously it is an error to choose ``--no-use-pep517`` if
-the project has no ``setup.py``, or explicitly requests a build backend). As
-with other command line flags, pip recognises the ``PIP_USE_PEP517``
-environment veriable and a ``use-pep517`` config file option (set to true or
-false) to set this option globally. Note that overriding pip's choice of
-whether to use :pep:`517` processing in this way does *not* affect whether pip
-will use an isolated build environment (which is controlled via
-``--no-build-isolation`` as noted above).
-
-Except in the case noted above (projects with no :pep:`518` ``[build-system]``
-section in ``pyproject.toml``), pip will never implicitly install a build
-system. Projects **must** ensure that the correct build system is listed in
-their ``requires`` list (this applies even if pip assumes that the
-``setuptools`` backend is being used, as noted above).
-
-.. _pep-518-limitations:
-
-**Historical Limitations**:
-
-* ``pip<18.0``: only supports installing build requirements from wheels, and
-  does not support the use of environment markers and extras (only version
-  specifiers are respected).
-
-* ``pip<18.1``: build dependencies using .pth files are not properly supported;
-  as a result namespace packages do not work under Python 3.2 and earlier.
-
-Future Developments
-~~~~~~~~~~~~~~~~~~~
-
-:pep:`426` notes that the intention is to add hooks to project metadata in
-version 2.1 of the metadata spec, to explicitly define how to build a project
-from its source. Once this version of the metadata spec is final, pip will
-migrate to using that interface. At that point, the ``setup.py`` interface
-documented here will be retained solely for legacy purposes, until projects
-have migrated.
-
-Specifically, applications should *not* expect to rely on there being any form
-of backward compatibility guarantees around the ``setup.py`` interface.
-
-
-Build Options
-~~~~~~~~~~~~~
-
-The ``--global-option`` and ``--build-option`` arguments to the ``pip install``
-and ``pip wheel`` inject additional arguments into the ``setup.py`` command
-(``--build-option`` is only available in ``pip wheel``).  These arguments are
-included in the command as follows:
-
-.. tab:: Unix/macOS
-
-    .. code-block:: console
-
-        python setup.py  BUILD COMMAND 
-
-.. tab:: Windows
-
-    .. code-block:: shell
-
-        py setup.py  BUILD COMMAND 
-
-The options are passed unmodified, and presently offer direct access to the
-distutils command line. Use of ``--global-option`` and ``--build-option``
-should be considered as build system dependent, and may not be supported in the
-current form if support for alternative build systems is added to pip.
-
-
-.. _`General Options`:
-
-General Options
-***************
-
-.. pip-general-options::
+You should be redirected automatically in 3 seconds. If that didn't
+work, here's a link: :doc:`../cli/pip`
diff --git a/docs/html/reference/pip_cache.rst b/docs/html/reference/pip_cache.rst
index 0a23c510d..a9cbd69da 100644
--- a/docs/html/reference/pip_cache.rst
+++ b/docs/html/reference/pip_cache.rst
@@ -1,27 +1,11 @@
+:orphan:
 
-.. _`pip cache`:
+.. meta::
 
-pip cache
----------
+  :http-equiv=refresh: 3; url=../../cli/pip_cache/
 
+This page has moved
+===================
 
-Usage
-*****
-
-.. tab:: Unix/macOS
-
-   .. pip-command-usage:: cache "python -m pip"
-
-.. tab:: Windows
-
-   .. pip-command-usage:: cache "py -m pip"
-
-Description
-***********
-
-.. pip-command-description:: cache
-
-Options
-*******
-
-.. pip-command-options:: cache
+You should be redirected automatically in 3 seconds. If that didn't
+work, here's a link: :doc:`../cli/pip_cache`
diff --git a/docs/html/reference/pip_check.rst b/docs/html/reference/pip_check.rst
index 268cf9a14..5bb7fc84f 100644
--- a/docs/html/reference/pip_check.rst
+++ b/docs/html/reference/pip_check.rst
@@ -1,87 +1,11 @@
-.. _`pip check`:
+:orphan:
 
-=========
-pip check
-=========
+.. meta::
 
+  :http-equiv=refresh: 3; url=../../cli/pip_check/
 
-Usage
-=====
+This page has moved
+===================
 
-.. tab:: Unix/macOS
-
-   .. pip-command-usage:: check "python -m pip"
-
-.. tab:: Windows
-
-   .. pip-command-usage:: check "py -m pip"
-
-
-Description
-===========
-
-.. pip-command-description:: check
-
-
-Examples
-========
-
-#. If all dependencies are compatible:
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: console
-
-         $ python -m pip check
-         No broken requirements found.
-         $ echo $?
-         0
-
-   .. tab:: Windows
-
-      .. code-block:: console
-
-         C:\> py -m pip check
-         No broken requirements found.
-         C:\> echo %errorlevel%
-         0
-
-#. If a package is missing:
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: console
-
-         $ python -m pip check
-         pyramid 1.5.2 requires WebOb, which is not installed.
-         $ echo $?
-         1
-
-   .. tab:: Windows
-
-      .. code-block:: console
-
-         C:\> py -m pip check
-         pyramid 1.5.2 requires WebOb, which is not installed.
-         C:\> echo %errorlevel%
-         1
-
-#. If a package has the wrong version:
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: console
-
-         $ python -m pip check
-         pyramid 1.5.2 has requirement WebOb>=1.3.1, but you have WebOb 0.8.
-         $ echo $?
-         1
-
-   .. tab:: Windows
-
-      .. code-block:: console
-
-         C:\> py -m pip check
-         pyramid 1.5.2 has requirement WebOb>=1.3.1, but you have WebOb 0.8.
-         C:\> echo %errorlevel%
-         1
+You should be redirected automatically in 3 seconds. If that didn't
+work, here's a link: :doc:`../cli/pip_check`
diff --git a/docs/html/reference/pip_config.rst b/docs/html/reference/pip_config.rst
index 8b2f84630..31a048a51 100644
--- a/docs/html/reference/pip_config.rst
+++ b/docs/html/reference/pip_config.rst
@@ -1,30 +1,11 @@
+:orphan:
 
-.. _`pip config`:
+.. meta::
 
-==========
-pip config
-==========
+  :http-equiv=refresh: 3; url=../../cli/pip_config/
 
+This page has moved
+===================
 
-Usage
-=====
-
-.. tab:: Unix/macOS
-
-   .. pip-command-usage:: config "python -m pip"
-
-.. tab:: Windows
-
-   .. pip-command-usage:: config "py -m pip"
-
-
-Description
-===========
-
-.. pip-command-description:: config
-
-
-Options
-=======
-
-.. pip-command-options:: config
+You should be redirected automatically in 3 seconds. If that didn't
+work, here's a link: :doc:`../cli/pip_config`
diff --git a/docs/html/reference/pip_debug.rst b/docs/html/reference/pip_debug.rst
index 4023533c9..b0de68275 100644
--- a/docs/html/reference/pip_debug.rst
+++ b/docs/html/reference/pip_debug.rst
@@ -1,35 +1,11 @@
-.. _`pip debug`:
+:orphan:
 
-=========
-pip debug
-=========
+.. meta::
 
+  :http-equiv=refresh: 3; url=../../cli/pip_debug/
 
-Usage
-=====
+This page has moved
+===================
 
-.. tab:: Unix/macOS
-
-   .. pip-command-usage:: debug "python -m pip"
-
-.. tab:: Windows
-
-   .. pip-command-usage:: debug "py -m pip"
-
-
-.. warning::
-
-    This command is only meant for debugging.
-    Its options and outputs are provisional and may change without notice.
-
-
-Description
-===========
-
-.. pip-command-description:: debug
-
-
-Options
-=======
-
-.. pip-command-options:: debug
+You should be redirected automatically in 3 seconds. If that didn't
+work, here's a link: :doc:`../cli/pip_debug`
diff --git a/docs/html/reference/pip_download.rst b/docs/html/reference/pip_download.rst
index 4f15314d7..d54a7bec5 100644
--- a/docs/html/reference/pip_download.rst
+++ b/docs/html/reference/pip_download.rst
@@ -1,226 +1,11 @@
+:orphan:
 
-.. _`pip download`:
+.. meta::
 
-============
-pip download
-============
+  :http-equiv=refresh: 3; url=../../cli/pip_download/
 
+This page has moved
+===================
 
-Usage
-=====
-
-.. tab:: Unix/macOS
-
-   .. pip-command-usage:: download "python -m pip"
-
-.. tab:: Windows
-
-   .. pip-command-usage:: download "py -m pip"
-
-
-Description
-===========
-
-.. pip-command-description:: download
-
-Overview
---------
-
-``pip download`` does the same resolution and downloading as ``pip install``,
-but instead of installing the dependencies, it collects the downloaded
-distributions into the directory provided (defaulting to the current
-directory). This directory can later be passed as the value to ``pip install
---find-links`` to facilitate offline or locked down package installation.
-
-``pip download`` with the ``--platform``, ``--python-version``,
-``--implementation``, and ``--abi`` options provides the ability to fetch
-dependencies for an interpreter and system other than the ones that pip is
-running on. ``--only-binary=:all:`` or ``--no-deps`` is required when using any
-of these options. It is important to note that these options all default to the
-current system/interpreter, and not to the most restrictive constraints (e.g.
-platform any, abi none, etc). To avoid fetching dependencies that happen to
-match the constraint of the current interpreter (but not your target one), it
-is recommended to specify all of these options if you are specifying one of
-them. Generic dependencies (e.g. universal wheels, or dependencies with no
-platform, abi, or implementation constraints) will still match an over-
-constrained download requirement.
-
-
-
-Options
-=======
-
-.. pip-command-options:: download
-
-.. pip-index-options:: download
-
-
-Examples
-========
-
-#. Download a package and all of its dependencies
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: shell
-
-         python -m pip download SomePackage
-         python -m pip download -d . SomePackage  # equivalent to above
-         python -m pip download --no-index --find-links=/tmp/wheelhouse -d /tmp/otherwheelhouse SomePackage
-
-   .. tab:: Windows
-
-      .. code-block:: shell
-
-         py -m pip download SomePackage
-         py -m pip download -d . SomePackage  # equivalent to above
-         py -m pip download --no-index --find-links=/tmp/wheelhouse -d /tmp/otherwheelhouse SomePackage
-
-
-#. Download a package and all of its dependencies with OSX specific interpreter constraints.
-   This forces OSX 10.10 or lower compatibility. Since OSX deps are forward compatible,
-   this will also match ``macosx-10_9_x86_64``, ``macosx-10_8_x86_64``, ``macosx-10_8_intel``,
-   etc.
-   It will also match deps with platform ``any``. Also force the interpreter version to ``27``
-   (or more generic, i.e. ``2``) and implementation to ``cp`` (or more generic, i.e. ``py``).
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: shell
-
-         python -m pip download \
-            --only-binary=:all: \
-            --platform macosx-10_10_x86_64 \
-            --python-version 27 \
-            --implementation cp \
-            SomePackage
-
-   .. tab:: Windows
-
-      .. code-block:: shell
-
-         py -m pip download ^
-            --only-binary=:all: ^
-            --platform macosx-10_10_x86_64 ^
-            --python-version 27 ^
-            --implementation cp ^
-            SomePackage
-
-#. Download a package and its dependencies with linux specific constraints.
-   Force the interpreter to be any minor version of py3k, and only accept
-   ``cp34m`` or ``none`` as the abi.
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: shell
-
-         python -m pip download \
-            --only-binary=:all: \
-            --platform linux_x86_64 \
-            --python-version 3 \
-            --implementation cp \
-            --abi cp34m \
-            SomePackage
-
-   .. tab:: Windows
-
-      .. code-block:: shell
-
-         py -m pip download ^
-            --only-binary=:all: ^
-            --platform linux_x86_64 ^
-            --python-version 3 ^
-            --implementation cp ^
-            --abi cp34m ^
-            SomePackage
-
-#. Force platform, implementation, and abi agnostic deps.
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: shell
-
-         python -m pip download \
-            --only-binary=:all: \
-            --platform any \
-            --python-version 3 \
-            --implementation py \
-            --abi none \
-            SomePackage
-
-   .. tab:: Windows
-
-      .. code-block:: shell
-
-         py -m pip download ^
-            --only-binary=:all: ^
-            --platform any ^
-            --python-version 3 ^
-            --implementation py ^
-            --abi none ^
-            SomePackage
-
-#. Even when overconstrained, this will still correctly fetch the pip universal wheel.
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: console
-
-         $ python -m pip download \
-            --only-binary=:all: \
-            --platform linux_x86_64 \
-            --python-version 33 \
-            --implementation cp \
-            --abi cp34m \
-            pip>=8
-
-      .. code-block:: console
-
-         $ ls pip-8.1.1-py2.py3-none-any.whl
-         pip-8.1.1-py2.py3-none-any.whl
-
-   .. tab:: Windows
-
-      .. code-block:: console
-
-         C:\> py -m pip download ^
-            --only-binary=:all: ^
-            --platform linux_x86_64 ^
-            --python-version 33 ^
-            --implementation cp ^
-            --abi cp34m ^
-            pip>=8
-
-      .. code-block:: console
-
-         C:\> dir pip-8.1.1-py2.py3-none-any.whl
-         pip-8.1.1-py2.py3-none-any.whl
-
-#. Download a package supporting one of several ABIs and platforms.
-    This is useful when fetching wheels for a well-defined interpreter, whose
-    supported ABIs and platforms are known and fixed, different than the one pip is
-    running under.
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: console
-
-         $ python -m pip download \
-            --only-binary=:all: \
-            --platform manylinux1_x86_64 --platform linux_x86_64 --platform any \
-            --python-version 36 \
-            --implementation cp \
-            --abi cp36m --abi cp36 --abi abi3 --abi none \
-            SomePackage
-
-   .. tab:: Windows
-
-      .. code-block:: console
-
-         C:> py -m pip download ^
-            --only-binary=:all: ^
-            --platform manylinux1_x86_64 --platform linux_x86_64 --platform any ^
-            --python-version 36 ^
-            --implementation cp ^
-            --abi cp36m --abi cp36 --abi abi3 --abi none ^
-            SomePackage
+You should be redirected automatically in 3 seconds. If that didn't
+work, here's a link: :doc:`../cli/pip_download`
diff --git a/docs/html/reference/pip_freeze.rst b/docs/html/reference/pip_freeze.rst
index 352f7d321..1cf31d5d7 100644
--- a/docs/html/reference/pip_freeze.rst
+++ b/docs/html/reference/pip_freeze.rst
@@ -1,74 +1,11 @@
+:orphan:
 
-.. _`pip freeze`:
+.. meta::
 
-==========
-pip freeze
-==========
+  :http-equiv=refresh: 3; url=../../cli/pip_freeze/
 
+This page has moved
+===================
 
-Usage
-=====
-
-.. tab:: Unix/macOS
-
-   .. pip-command-usage:: freeze "python -m pip"
-
-.. tab:: Windows
-
-   .. pip-command-usage:: freeze "py -m pip"
-
-
-Description
-===========
-
-.. pip-command-description:: freeze
-
-
-Options
-=======
-
-.. pip-command-options:: freeze
-
-
-Examples
-========
-
-#. Generate output suitable for a requirements file.
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: console
-
-         $ python -m pip freeze
-         docutils==0.11
-         Jinja2==2.7.2
-         MarkupSafe==0.19
-         Pygments==1.6
-         Sphinx==1.2.2
-
-   .. tab:: Windows
-
-      .. code-block:: console
-
-         C:\> py -m pip freeze
-         docutils==0.11
-         Jinja2==2.7.2
-         MarkupSafe==0.19
-         Pygments==1.6
-         Sphinx==1.2.2
-
-#. Generate a requirements file and then install from it in another environment.
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: shell
-
-         env1/bin/python -m pip freeze > requirements.txt
-         env2/bin/python -m pip install -r requirements.txt
-
-   .. tab:: Windows
-
-      .. code-block:: shell
-
-         env1\bin\python -m pip freeze > requirements.txt
-         env2\bin\python -m pip install -r requirements.txt
+You should be redirected automatically in 3 seconds. If that didn't
+work, here's a link: :doc:`../cli/pip_freeze`
diff --git a/docs/html/reference/pip_hash.rst b/docs/html/reference/pip_hash.rst
index 7df0d5a4f..6112bec5f 100644
--- a/docs/html/reference/pip_hash.rst
+++ b/docs/html/reference/pip_hash.rst
@@ -1,72 +1,11 @@
-.. _`pip hash`:
+:orphan:
 
-========
-pip hash
-========
+.. meta::
 
+  :http-equiv=refresh: 3; url=../../cli/pip_hash/
 
-Usage
-=====
+This page has moved
+===================
 
-.. tab:: Unix/macOS
-
-   .. pip-command-usage:: hash "python -m pip"
-
-.. tab:: Windows
-
-   .. pip-command-usage:: hash "py -m pip"
-
-
-Description
-===========
-
-.. pip-command-description:: hash
-
-Overview
---------
-
-``pip hash`` is a convenient way to get a hash digest for use with
-:ref:`hash-checking mode`, especially for packages with multiple archives. The
-error message from ``pip install --require-hashes ...`` will give you one
-hash, but, if there are multiple archives (like source and binary ones), you
-will need to manually download and compute a hash for the others. Otherwise, a
-spurious hash mismatch could occur when :ref:`pip install` is passed a
-different set of options, like :ref:`--no-binary `.
-
-
-Options
-=======
-
-.. pip-command-options:: hash
-
-
-Example
-=======
-
-Compute the hash of a downloaded archive:
-
-.. tab:: Unix/macOS
-
-   .. code-block:: console
-
-      $ python -m pip download SomePackage
-      Collecting SomePackage
-         Downloading SomePackage-2.2.tar.gz
-         Saved ./pip_downloads/SomePackage-2.2.tar.gz
-      Successfully downloaded SomePackage
-      $ python -m pip hash ./pip_downloads/SomePackage-2.2.tar.gz
-      ./pip_downloads/SomePackage-2.2.tar.gz:
-      --hash=sha256:93e62e05c7ad3da1a233def6731e8285156701e3419a5fe279017c429ec67ce0
-
-.. tab:: Windows
-
-   .. code-block:: console
-
-      C:\> py -m pip download SomePackage
-      Collecting SomePackage
-         Downloading SomePackage-2.2.tar.gz
-         Saved ./pip_downloads/SomePackage-2.2.tar.gz
-      Successfully downloaded SomePackage
-      C:\> py -m pip hash ./pip_downloads/SomePackage-2.2.tar.gz
-      ./pip_downloads/SomePackage-2.2.tar.gz:
-      --hash=sha256:93e62e05c7ad3da1a233def6731e8285156701e3419a5fe279017c429ec67ce0
+You should be redirected automatically in 3 seconds. If that didn't
+work, here's a link: :doc:`../cli/pip_hash`
diff --git a/docs/html/reference/pip_install.rst b/docs/html/reference/pip_install.rst
index 81e315eba..580900cfb 100644
--- a/docs/html/reference/pip_install.rst
+++ b/docs/html/reference/pip_install.rst
@@ -1,1199 +1,11 @@
-.. _`pip install`:
+:orphan:
 
-===========
-pip install
-===========
+.. meta::
 
+  :http-equiv=refresh: 3; url=../../cli/pip_install/
 
+This page has moved
+===================
 
-Usage
-=====
-
-.. tab:: Unix/macOS
-
-   .. pip-command-usage:: install "python -m pip"
-
-.. tab:: Windows
-
-   .. pip-command-usage:: install "py -m pip"
-
-
-
-Description
-===========
-
-.. pip-command-description:: install
-
-Overview
---------
-
-pip install has several stages:
-
-1. Identify the base requirements. The user supplied arguments are processed
-   here.
-2. Resolve dependencies. What will be installed is determined here.
-3. Build wheels. All the dependencies that can be are built into wheels.
-4. Install the packages (and uninstall anything being upgraded/replaced).
-
-Note that ``pip install`` prefers to leave the installed version as-is
-unless ``--upgrade`` is specified.
-
-Argument Handling
------------------
-
-When looking at the items to be installed, pip checks what type of item
-each is, in the following order:
-
-1. Project or archive URL.
-2. Local directory (which must contain a ``setup.py``, or pip will report
-   an error).
-3. Local file (a sdist or wheel format archive, following the naming
-   conventions for those formats).
-4. A requirement, as specified in :pep:`440`.
-
-Each item identified is added to the set of requirements to be satisfied by
-the install.
-
-Working Out the Name and Version
---------------------------------
-
-For each candidate item, pip needs to know the project name and version. For
-wheels (identified by the ``.whl`` file extension) this can be obtained from
-the filename, as per the Wheel spec. For local directories, or explicitly
-specified sdist files, the ``setup.py egg_info`` command is used to determine
-the project metadata. For sdists located via an index, the filename is parsed
-for the name and project version (this is in theory slightly less reliable
-than using the ``egg_info`` command, but avoids downloading and processing
-unnecessary numbers of files).
-
-Any URL may use the ``#egg=name`` syntax (see :ref:`VCS Support`) to
-explicitly state the project name.
-
-Satisfying Requirements
------------------------
-
-Once pip has the set of requirements to satisfy, it chooses which version of
-each requirement to install using the simple rule that the latest version that
-satisfies the given constraints will be installed (but see :ref:`here `
-for an exception regarding pre-release versions). Where more than one source of
-the chosen version is available, it is assumed that any source is acceptable
-(as otherwise the versions would differ).
-
-Installation Order
-------------------
-
-.. note::
-
-   This section is only about installation order of runtime dependencies, and
-   does not apply to build dependencies (those are specified using PEP 518).
-
-As of v6.1.0, pip installs dependencies before their dependents, i.e. in
-"topological order."  This is the only commitment pip currently makes related
-to order.  While it may be coincidentally true that pip will install things in
-the order of the install arguments or in the order of the items in a
-requirements file, this is not a promise.
-
-In the event of a dependency cycle (aka "circular dependency"), the current
-implementation (which might possibly change later) has it such that the first
-encountered member of the cycle is installed last.
-
-For instance, if quux depends on foo which depends on bar which depends on baz,
-which depends on foo:
-
-.. tab:: Unix/macOS
-
-   .. code-block:: console
-
-      $ python -m pip install quux
-      ...
-      Installing collected packages baz, bar, foo, quux
-
-      $ python -m pip install bar
-      ...
-      Installing collected packages foo, baz, bar
-
-.. tab:: Windows
-
-   .. code-block:: console
-
-      C:\> py -m pip install quux
-      ...
-      Installing collected packages baz, bar, foo, quux
-
-      C:\> py -m pip install bar
-      ...
-      Installing collected packages foo, baz, bar
-
-
-Prior to v6.1.0, pip made no commitments about install order.
-
-The decision to install topologically is based on the principle that
-installations should proceed in a way that leaves the environment usable at each
-step. This has two main practical benefits:
-
-1. Concurrent use of the environment during the install is more likely to work.
-2. A failed install is less likely to leave a broken environment.  Although pip
-   would like to support failure rollbacks eventually, in the mean time, this is
-   an improvement.
-
-Although the new install order is not intended to replace (and does not replace)
-the use of ``setup_requires`` to declare build dependencies, it may help certain
-projects install from sdist (that might previously fail) that fit the following
-profile:
-
-1. They have build dependencies that are also declared as install dependencies
-   using ``install_requires``.
-2. ``python setup.py egg_info`` works without their build dependencies being
-   installed.
-3. For whatever reason, they don't or won't declare their build dependencies using
-   ``setup_requires``.
-
-
-.. _`Requirements File Format`:
-
-Requirements File Format
-------------------------
-
-Each line of the requirements file indicates something to be installed,
-and like arguments to :ref:`pip install`, the following forms are supported::
-
-    [[--option]...]
-     [; markers] [[--option]...]
-    
-    [-e] 
-    [-e] 
-
-For details on requirement specifiers, see :ref:`Requirement Specifiers`.
-
-See the :ref:`pip install Examples` for examples of all these forms.
-
-A line that begins with ``#`` is treated as a comment and ignored. Whitespace
-followed by a ``#`` causes the ``#`` and the remainder of the line to be
-treated as a comment.
-
-A line ending in an unescaped ``\`` is treated as a line continuation
-and the newline following it is effectively ignored.
-
-Comments are stripped *after* line continuations are processed.
-
-To interpret the requirements file in UTF-8 format add a comment
-``# -*- coding: utf-8 -*-`` to the first or second line of the file.
-
-The following options are supported:
-
-.. pip-requirements-file-options-ref-list::
-
-Please note that the above options are global options, and should be specified on their individual lines.
-The options which can be applied to individual requirements are
-:ref:`--install-option `, :ref:`--global-option ` and ``--hash``.
-
-For example, to specify :ref:`--pre `, :ref:`--no-index ` and two
-:ref:`--find-links ` locations:
-
-::
-
---pre
---no-index
---find-links /my/local/archives
---find-links http://some.archives.com/archives
-
-
-If you wish, you can refer to other requirements files, like this::
-
-    -r more_requirements.txt
-
-You can also refer to :ref:`constraints files `, like this::
-
-    -c some_constraints.txt
-
-.. _`Using Environment Variables`:
-
-Using Environment Variables
-^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-Since version 10, pip supports the use of environment variables inside the
-requirements file. You can now store sensitive data (tokens, keys, etc.) in
-environment variables and only specify the variable name for your requirements,
-letting pip lookup the value at runtime. This approach aligns with the commonly
-used `12-factor configuration pattern `_.
-
-You have to use the POSIX format for variable names including brackets around
-the uppercase name as shown in this example: ``${API_TOKEN}``. pip will attempt
-to find the corresponding environment variable defined on the host system at
-runtime.
-
-.. note::
-
-   There is no support for other variable expansion syntaxes such as
-   ``$VARIABLE`` and ``%VARIABLE%``.
-
-
-.. _`Example Requirements File`:
-
-Example Requirements File
-^^^^^^^^^^^^^^^^^^^^^^^^^
-
-Use ``pip install -r example-requirements.txt`` to install::
-
-    #
-    ####### example-requirements.txt #######
-    #
-    ###### Requirements without Version Specifiers ######
-    nose
-    nose-cov
-    beautifulsoup4
-    #
-    ###### Requirements with Version Specifiers ######
-    #   See https://www.python.org/dev/peps/pep-0440/#version-specifiers
-    docopt == 0.6.1             # Version Matching. Must be version 0.6.1
-    keyring >= 4.1.1            # Minimum version 4.1.1
-    coverage != 3.5             # Version Exclusion. Anything except version 3.5
-    Mopidy-Dirble ~= 1.1        # Compatible release. Same as >= 1.1, == 1.*
-    #
-    ###### Refer to other requirements files ######
-    -r other-requirements.txt
-    #
-    #
-    ###### A particular file ######
-    ./downloads/numpy-1.9.2-cp34-none-win32.whl
-    http://wxpython.org/Phoenix/snapshot-builds/wxPython_Phoenix-3.0.3.dev1820+49a8884-cp34-none-win_amd64.whl
-    #
-    ###### Additional Requirements without Version Specifiers ######
-    #   Same as 1st section, just here to show that you can put things in any order.
-    rejected
-    green
-    #
-
-.. _`Requirement Specifiers`:
-
-Requirement Specifiers
-----------------------
-
-pip supports installing from a package index using a :term:`requirement
-specifier `. Generally speaking, a requirement
-specifier is composed of a project name followed by optional :term:`version
-specifiers `.  :pep:`508` contains a full specification
-of the format of a requirement. Since version 18.1 pip supports the
-``url_req``-form specification.
-
-Some examples:
-
- ::
-
-  SomeProject
-  SomeProject == 1.3
-  SomeProject >=1.2,<2.0
-  SomeProject[foo, bar]
-  SomeProject~=1.4.2
-
-Since version 6.0, pip also supports specifiers containing `environment markers
-`__ like so:
-
- ::
-
-  SomeProject ==5.4 ; python_version < '3.8'
-  SomeProject; sys_platform == 'win32'
-
-Since version 19.1, pip also supports `direct references
-`__ like so:
-
- ::
-
-  SomeProject @ file:///somewhere/...
-
-Environment markers are supported in the command line and in requirements files.
-
-.. note::
-
-   Use quotes around specifiers in the shell when using ``>``, ``<``, or when
-   using environment markers. Don't use quotes in requirement files. [1]_
-
-
-.. _`Per-requirement Overrides`:
-
-Per-requirement Overrides
--------------------------
-
-Since version 7.0 pip supports controlling the command line options given to
-``setup.py`` via requirements files. This disables the use of wheels (cached or
-otherwise) for that package, as ``setup.py`` does not exist for wheels.
-
-The ``--global-option`` and ``--install-option`` options are used to pass
-options to ``setup.py``. For example:
-
- ::
-
-    FooProject >= 1.2 --global-option="--no-user-cfg" \
-                      --install-option="--prefix='/usr/local'" \
-                      --install-option="--no-compile"
-
-The above translates roughly into running FooProject's ``setup.py``
-script as:
-
- ::
-
-   python setup.py --no-user-cfg install --prefix='/usr/local' --no-compile
-
-Note that the only way of giving more than one option to ``setup.py``
-is through multiple ``--global-option`` and ``--install-option``
-options, as shown in the example above. The value of each option is
-passed as a single argument to the ``setup.py`` script. Therefore, a
-line such as the following is invalid and would result in an
-installation error.
-
-::
-
-   # Invalid. Please use '--install-option' twice as shown above.
-   FooProject >= 1.2 --install-option="--prefix=/usr/local --no-compile"
-
-
-.. _`Pre Release Versions`:
-
-Pre-release Versions
---------------------
-
-Starting with v1.4, pip will only install stable versions as specified by
-`pre-releases`_ by default. If a version cannot be parsed as a compliant :pep:`440`
-version then it is assumed to be a pre-release.
-
-If a Requirement specifier includes a pre-release or development version
-(e.g. ``>=0.0.dev0``) then pip will allow pre-release and development versions
-for that requirement. This does not include the != flag.
-
-The ``pip install`` command also supports a :ref:`--pre ` flag
-that enables installation of pre-releases and development releases.
-
-
-.. _pre-releases: https://www.python.org/dev/peps/pep-0440/#handling-of-pre-releases
-
-
-.. _`VCS Support`:
-
-VCS Support
------------
-
-pip supports installing from Git, Mercurial, Subversion and Bazaar, and detects
-the type of VCS using URL prefixes: ``git+``, ``hg+``, ``svn+``, and ``bzr+``.
-
-pip requires a working VCS command on your path: ``git``, ``hg``, ``svn``, or
-``bzr``.
-
-VCS projects can be installed in :ref:`editable mode ` (using
-the :ref:`--editable ` option) or not.
-
-* For editable installs, the clone location by default is ``/src/SomeProject`` in virtual environments, and
-  ``/src/SomeProject``
-  for global installs.  The :ref:`--src ` option can be used to
-  modify this location.
-* For non-editable installs, the project is built locally in a temp dir and then
-  installed normally. Note that if a satisfactory version of the package is
-  already installed, the VCS source will not overwrite it without an
-  ``--upgrade`` flag. VCS requirements pin the package version (specified
-  in the ``setup.py`` file) of the target commit, not necessarily the commit
-  itself.
-* The :ref:`pip freeze` subcommand will record the VCS requirement specifier
-  (referencing a specific commit) if and only if the install is done using the
-  editable option.
-
-The "project name" component of the URL suffix ``egg=``
-is used by pip in its dependency logic to identify the project prior
-to pip downloading and analyzing the metadata. For projects
-where ``setup.py`` is not in the root of project, the "subdirectory" component
-is used. The value of the "subdirectory" component should be a path starting
-from the root of the project to where ``setup.py`` is located.
-
-If your repository layout is::
-
-   pkg_dir
-   ├── setup.py  # setup.py for package "pkg"
-   └── some_module.py
-   other_dir
-   └── some_file
-   some_other_file
-
-Then, to install from this repository, the syntax would be:
-
-.. tab:: Unix/macOS
-
-   .. code-block:: shell
-
-      python -m pip install -e "vcs+protocol://repo_url/#egg=pkg&subdirectory=pkg_dir"
-
-.. tab:: Windows
-
-   .. code-block:: shell
-
-      py -m pip install -e "vcs+protocol://repo_url/#egg=pkg&subdirectory=pkg_dir"
-
-
-Git
-^^^
-
-pip currently supports cloning over ``git``, ``git+http``, ``git+https``,
-``git+ssh``, ``git+git`` and ``git+file``.
-
-.. warning::
-
-    Note that the use of ``git``, ``git+git``, and ``git+http`` is discouraged.
-    The former two use `the Git Protocol`_, which lacks authentication, and HTTP is
-    insecure due to lack of TLS based encryption.
-
-Here are the supported forms::
-
-    [-e] git+http://git.example.com/MyProject#egg=MyProject
-    [-e] git+https://git.example.com/MyProject#egg=MyProject
-    [-e] git+ssh://git.example.com/MyProject#egg=MyProject
-    [-e] git+file:///home/user/projects/MyProject#egg=MyProject
-
-Passing a branch name, a commit hash, a tag name or a git ref is possible like so::
-
-    [-e] git+https://git.example.com/MyProject.git@master#egg=MyProject
-    [-e] git+https://git.example.com/MyProject.git@v1.0#egg=MyProject
-    [-e] git+https://git.example.com/MyProject.git@da39a3ee5e6b4b0d3255bfef95601890afd80709#egg=MyProject
-    [-e] git+https://git.example.com/MyProject.git@refs/pull/123/head#egg=MyProject
-
-When passing a commit hash, specifying a full hash is preferable to a partial
-hash because a full hash allows pip to operate more efficiently (e.g. by
-making fewer network calls).
-
-.. _`the Git Protocol`: https://git-scm.com/book/en/v2/Git-on-the-Server-The-Protocols
-
-Mercurial
-^^^^^^^^^
-
-The supported schemes are: ``hg+file``, ``hg+http``, ``hg+https``,
-``hg+static-http``, and ``hg+ssh``.
-
-Here are the supported forms::
-
-    [-e] hg+http://hg.myproject.org/MyProject#egg=MyProject
-    [-e] hg+https://hg.myproject.org/MyProject#egg=MyProject
-    [-e] hg+ssh://hg.myproject.org/MyProject#egg=MyProject
-    [-e] hg+file:///home/user/projects/MyProject#egg=MyProject
-
-You can also specify a revision number, a revision hash, a tag name or a local
-branch name like so::
-
-    [-e] hg+http://hg.example.com/MyProject@da39a3ee5e6b#egg=MyProject
-    [-e] hg+http://hg.example.com/MyProject@2019#egg=MyProject
-    [-e] hg+http://hg.example.com/MyProject@v1.0#egg=MyProject
-    [-e] hg+http://hg.example.com/MyProject@special_feature#egg=MyProject
-
-Subversion
-^^^^^^^^^^
-
-pip supports the URL schemes ``svn``, ``svn+svn``, ``svn+http``, ``svn+https``, ``svn+ssh``.
-
-Here are some of the supported forms::
-
-    [-e] svn+https://svn.example.com/MyProject#egg=MyProject
-    [-e] svn+ssh://svn.example.com/MyProject#egg=MyProject
-    [-e] svn+ssh://user@svn.example.com/MyProject#egg=MyProject
-
-You can also give specific revisions to an SVN URL, like so::
-
-    [-e] svn+svn://svn.example.com/svn/MyProject#egg=MyProject
-    [-e] svn+http://svn.example.com/svn/MyProject/trunk@2019#egg=MyProject
-
-which will check out revision 2019.  ``@{20080101}`` would also check
-out the revision from 2008-01-01. You can only check out specific
-revisions using ``-e svn+...``.
-
-Bazaar
-^^^^^^
-
-pip supports Bazaar using the ``bzr+http``, ``bzr+https``, ``bzr+ssh``,
-``bzr+sftp``, ``bzr+ftp`` and ``bzr+lp`` schemes.
-
-Here are the supported forms::
-
-    [-e] bzr+http://bzr.example.com/MyProject/trunk#egg=MyProject
-    [-e] bzr+sftp://user@example.com/MyProject/trunk#egg=MyProject
-    [-e] bzr+ssh://user@example.com/MyProject/trunk#egg=MyProject
-    [-e] bzr+ftp://user@example.com/MyProject/trunk#egg=MyProject
-    [-e] bzr+lp:MyProject#egg=MyProject
-
-Tags or revisions can be installed like so::
-
-    [-e] bzr+https://bzr.example.com/MyProject/trunk@2019#egg=MyProject
-    [-e] bzr+http://bzr.example.com/MyProject/trunk@v1.0#egg=MyProject
-
-Using Environment Variables
-^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-Since version 10, pip also makes it possible to use environment variables which
-makes it possible to reference private repositories without having to store
-access tokens in the requirements file. For example, a private git repository
-allowing Basic Auth for authentication can be refenced like this::
-
-    [-e] git+http://${AUTH_USER}:${AUTH_PASSWORD}@git.example.com/MyProject#egg=MyProject
-    [-e] git+https://${AUTH_USER}:${AUTH_PASSWORD}@git.example.com/MyProject#egg=MyProject
-
-.. note::
-
-   Only ``${VARIABLE}`` is supported, other formats like ``$VARIABLE`` or
-   ``%VARIABLE%`` won't work.
-
-Finding Packages
-----------------
-
-pip searches for packages on `PyPI`_ using the
-`HTTP simple interface `_,
-which is documented `here `_
-and `there `_.
-
-pip offers a number of package index options for modifying how packages are
-found.
-
-pip looks for packages in a number of places: on PyPI (if not disabled via
-``--no-index``), in the local filesystem, and in any additional repositories
-specified via ``--find-links`` or ``--index-url``. There is no ordering in
-the locations that are searched. Rather they are all checked, and the "best"
-match for the requirements (in terms of version number - see :pep:`440` for
-details) is selected.
-
-See the :ref:`pip install Examples`.
-
-
-.. _`SSL Certificate Verification`:
-
-SSL Certificate Verification
-----------------------------
-
-Starting with v1.3, pip provides SSL certificate verification over https, to
-prevent man-in-the-middle attacks against PyPI downloads.
-
-
-.. _`Caching`:
-
-Caching
--------
-
-Starting with v6.0, pip provides an on-by-default cache which functions
-similarly to that of a web browser. While the cache is on by default and is
-designed do the right thing by default you can disable the cache and always
-access PyPI by utilizing the ``--no-cache-dir`` option.
-
-When making any HTTP request pip will first check its local cache to determine
-if it has a suitable response stored for that request which has not expired. If
-it does then it simply returns that response and doesn't make the request.
-
-If it has a response stored, but it has expired, then it will attempt to make a
-conditional request to refresh the cache which will either return an empty
-response telling pip to simply use the cached item (and refresh the expiration
-timer) or it will return a whole new response which pip can then store in the
-cache.
-
-While this cache attempts to minimize network activity, it does not prevent
-network access altogether. If you want a local install solution that
-circumvents accessing PyPI, see :ref:`Installing from local packages`.
-
-The default location for the cache directory depends on the operating system:
-
-Unix
-  :file:`~/.cache/pip` and it respects the ``XDG_CACHE_HOME`` directory.
-macOS
-  :file:`~/Library/Caches/pip`.
-Windows
-  :file:`\\pip\\Cache`
-
-Run ``pip cache dir`` to show the cache directory and see :ref:`pip cache` to
-inspect and manage pip’s cache.
-
-
-.. _`Wheel cache`:
-
-Wheel Cache
-^^^^^^^^^^^
-
-pip will read from the subdirectory ``wheels`` within the pip cache directory
-and use any packages found there. This is disabled via the same
-``--no-cache-dir`` option that disables the HTTP cache. The internal structure
-of that is not part of the pip API. As of 7.0, pip makes a subdirectory for
-each sdist that wheels are built from and places the resulting wheels inside.
-
-As of version 20.0, pip also caches wheels when building from an immutable Git
-reference (i.e. a commit hash).
-
-pip attempts to choose the best wheels from those built in preference to
-building a new wheel. Note that this means when a package has both optional
-C extensions and builds ``py`` tagged wheels when the C extension can't be built
-that pip will not attempt to build a better wheel for Pythons that would have
-supported it, once any generic wheel is built. To correct this, make sure that
-the wheels are built with Python specific tags - e.g. pp on PyPy.
-
-When no wheels are found for an sdist, pip will attempt to build a wheel
-automatically and insert it into the wheel cache.
-
-
-.. _`hash-checking mode`:
-
-Hash-Checking Mode
-------------------
-
-Since version 8.0, pip can check downloaded package archives against local
-hashes to protect against remote tampering. To verify a package against one or
-more hashes, add them to the end of the line::
-
-    FooProject == 1.2 --hash=sha256:2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824 \
-                      --hash=sha256:486ea46224d1bb4fb680f34f7c9ad96a8f24ec88be73ea8e5a6c65260e9cb8a7
-
-(The ability to use multiple hashes is important when a package has both
-binary and source distributions or when it offers binary distributions for a
-variety of platforms.)
-
-The recommended hash algorithm at the moment is sha256, but stronger ones are
-allowed, including all those supported by ``hashlib``. However, weaker ones
-such as md5, sha1, and sha224 are excluded to avoid giving a false sense of
-security.
-
-Hash verification is an all-or-nothing proposition. Specifying a ``--hash``
-against any requirement not only checks that hash but also activates a global
-*hash-checking mode*, which imposes several other security restrictions:
-
-* Hashes are required for all requirements. This is because a partially-hashed
-  requirements file is of little use and thus likely an error: a malicious
-  actor could slip bad code into the installation via one of the unhashed
-  requirements. Note that hashes embedded in URL-style requirements via the
-  ``#md5=...`` syntax suffice to satisfy this rule (regardless of hash
-  strength, for legacy reasons), though you should use a stronger
-  hash like sha256 whenever possible.
-* Hashes are required for all dependencies. An error results if there is a
-  dependency that is not spelled out and hashed in the requirements file.
-* Requirements that take the form of project names (rather than URLs or local
-  filesystem paths) must be pinned to a specific version using ``==``. This
-  prevents a surprising hash mismatch upon the release of a new version
-  that matches the requirement specifier.
-* ``--egg`` is disallowed, because it delegates installation of dependencies
-  to setuptools, giving up pip's ability to enforce any of the above.
-
-.. _`--require-hashes`:
-
-Hash-checking mode can be forced on with the ``--require-hashes`` command-line
-option:
-
-.. tab:: Unix/macOS
-
-   .. code-block:: console
-
-      $ python -m pip install --require-hashes -r requirements.txt
-      ...
-      Hashes are required in --require-hashes mode (implicitly on when a hash is
-      specified for any package). These requirements were missing hashes,
-      leaving them open to tampering. These are the hashes the downloaded
-      archives actually had. You can add lines like these to your requirements
-      files to prevent tampering.
-         pyelasticsearch==1.0 --hash=sha256:44ddfb1225054d7d6b1d02e9338e7d4809be94edbe9929a2ec0807d38df993fa
-         more-itertools==2.2 --hash=sha256:93e62e05c7ad3da1a233def6731e8285156701e3419a5fe279017c429ec67ce0
-
-.. tab:: Windows
-
-   .. code-block:: console
-
-      C:\> py -m pip install --require-hashes -r requirements.txt
-      ...
-      Hashes are required in --require-hashes mode (implicitly on when a hash is
-      specified for any package). These requirements were missing hashes,
-      leaving them open to tampering. These are the hashes the downloaded
-      archives actually had. You can add lines like these to your requirements
-      files to prevent tampering.
-         pyelasticsearch==1.0 --hash=sha256:44ddfb1225054d7d6b1d02e9338e7d4809be94edbe9929a2ec0807d38df993fa
-         more-itertools==2.2 --hash=sha256:93e62e05c7ad3da1a233def6731e8285156701e3419a5fe279017c429ec67ce0
-
-
-This can be useful in deploy scripts, to ensure that the author of the
-requirements file provided hashes. It is also a convenient way to bootstrap
-your list of hashes, since it shows the hashes of the packages it fetched. It
-fetches only the preferred archive for each package, so you may still need to
-add hashes for alternatives archives using :ref:`pip hash`: for instance if
-there is both a binary and a source distribution.
-
-The :ref:`wheel cache ` is disabled in hash-checking mode to
-prevent spurious hash mismatch errors. These would otherwise occur while
-installing sdists that had already been automatically built into cached wheels:
-those wheels would be selected for installation, but their hashes would not
-match the sdist ones from the requirements file. A further complication is that
-locally built wheels are nondeterministic: contemporary modification times make
-their way into the archive, making hashes unpredictable across machines and
-cache flushes. Compilation of C code adds further nondeterminism, as many
-compilers include random-seeded values in their output. However, wheels fetched
-from index servers are the same every time. They land in pip's HTTP cache, not
-its wheel cache, and are used normally in hash-checking mode. The only downside
-of having the wheel cache disabled is thus extra build time for sdists, and
-this can be solved by making sure pre-built wheels are available from the index
-server.
-
-Hash-checking mode also works with :ref:`pip download` and :ref:`pip wheel`. A
-:ref:`comparison of hash-checking mode with other repeatability strategies
-` is available in the User Guide.
-
-.. warning::
-
-   Beware of the ``setup_requires`` keyword arg in :file:`setup.py`. The
-   (rare) packages that use it will cause those dependencies to be downloaded
-   by setuptools directly, skipping pip's hash-checking. If you need to use
-   such a package, see :ref:`Controlling
-   setup_requires`.
-
-.. warning::
-
-   Be careful not to nullify all your security work when you install your
-   actual project by using setuptools directly: for example, by calling
-   ``python setup.py install``, ``python setup.py develop``, or
-   ``easy_install``. Setuptools will happily go out and download, unchecked,
-   anything you missed in your requirements file—and it’s easy to miss things
-   as your project evolves. To be safe, install your project using pip and
-   :ref:`--no-deps `.
-
-   Instead of ``python setup.py develop``, use...
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: shell
-
-         python -m pip install --no-deps -e .
-
-   .. tab:: Windows
-
-      .. code-block:: shell
-
-         py -m pip install --no-deps -e .
-
-
-   Instead of ``python setup.py install``, use...
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: shell
-
-         python -m pip install --no-deps .
-
-   .. tab:: Windows
-
-      .. code-block:: shell
-
-         py -m pip install --no-deps .
-
-Hashes from PyPI
-^^^^^^^^^^^^^^^^
-
-PyPI provides an MD5 hash in the fragment portion of each package download URL,
-like ``#md5=123...``, which pip checks as a protection against download
-corruption. Other hash algorithms that have guaranteed support from ``hashlib``
-are also supported here: sha1, sha224, sha384, sha256, and sha512. Since this
-hash originates remotely, it is not a useful guard against tampering and thus
-does not satisfy the ``--require-hashes`` demand that every package have a
-local hash.
-
-
-Local project installs
-----------------------
-
-pip supports installing local project in both regular mode and editable mode.
-You can install local projects by specifying the project path to pip:
-
-.. tab:: Unix/macOS
-
-   .. code-block:: shell
-
-      python -m pip install path/to/SomeProject
-
-.. tab:: Windows
-
-   .. code-block:: shell
-
-      py -m pip install path/to/SomeProject
-
-During regular installation, pip will copy the entire project directory to a
-temporary location and install from there. The exception is that pip will
-exclude .tox and .nox directories present in the top level of the project from
-being copied.
-
-
-.. _`editable-installs`:
-
-"Editable" Installs
-^^^^^^^^^^^^^^^^^^^
-
-"Editable" installs are fundamentally `"setuptools develop mode"
-`_
-installs.
-
-You can install local projects or VCS projects in "editable" mode:
-
-.. tab:: Unix/macOS
-
-   .. code-block:: shell
-
-      python -m pip install -e path/to/SomeProject
-      python -m pip install -e git+http://repo/my_project.git#egg=SomeProject
-
-.. tab:: Windows
-
-   .. code-block:: shell
-
-      py -m pip install -e path/to/SomeProject
-      py -m pip install -e git+http://repo/my_project.git#egg=SomeProject
-
-
-(See the :ref:`VCS Support` section above for more information on VCS-related syntax.)
-
-For local projects, the "SomeProject.egg-info" directory is created relative to
-the project path.  This is one advantage over just using ``setup.py develop``,
-which creates the "egg-info" directly relative the current working directory.
-
-
-.. _`controlling-setup-requires`:
-
-Controlling setup_requires
---------------------------
-
-Setuptools offers the ``setup_requires`` `setup() keyword
-`_
-for specifying dependencies that need to be present in order for the
-``setup.py`` script to run.  Internally, Setuptools uses ``easy_install``
-to fulfill these dependencies.
-
-pip has no way to control how these dependencies are located.  None of the
-package index options have an effect.
-
-The solution is to configure a "system" or "personal" `Distutils configuration
-file
-`_ to
-manage the fulfillment.
-
-For example, to have the dependency located at an alternate index, add this:
-
-::
-
-  [easy_install]
-  index_url = https://my.index-mirror.com
-
-To have the dependency located from a local directory and not crawl PyPI, add this:
-
-::
-
-  [easy_install]
-  allow_hosts = ''
-  find_links = file:///path/to/local/archives/
-
-
-Build System Interface
-----------------------
-
-In order for pip to install a package from source, ``setup.py`` must implement
-the following commands::
-
-    setup.py egg_info [--egg-base XXX]
-    setup.py install --record XXX [--single-version-externally-managed] [--root XXX] [--compile|--no-compile] [--install-headers XXX]
-
-The ``egg_info`` command should create egg metadata for the package, as
-described in the setuptools documentation at
-https://setuptools.readthedocs.io/en/latest/setuptools.html#egg-info-create-egg-metadata-and-set-build-tags
-
-The ``install`` command should implement the complete process of installing the
-package to the target directory XXX.
-
-To install a package in "editable" mode (``pip install -e``), ``setup.py`` must
-implement the following command::
-
-    setup.py develop --no-deps
-
-This should implement the complete process of installing the package in
-"editable" mode.
-
-All packages will be attempted to built into wheels::
-
-    setup.py bdist_wheel -d XXX
-
-One further ``setup.py`` command is invoked by ``pip install``::
-
-    setup.py clean
-
-This command is invoked to clean up temporary commands from the build. (TODO:
-Investigate in more detail when this command is required).
-
-No other build system commands are invoked by the ``pip install`` command.
-
-Installing a package from a wheel does not invoke the build system at all.
-
-.. _PyPI: https://pypi.org/
-.. _setuptools extras: https://setuptools.readthedocs.io/en/latest/setuptools.html#declaring-extras-optional-features-with-their-own-dependencies
-
-
-
-.. _`pip install Options`:
-
-
-Options
-=======
-
-.. pip-command-options:: install
-
-.. pip-index-options:: install
-
-
-.. _`pip install Examples`:
-
-
-Examples
-========
-
-#. Install ``SomePackage`` and its dependencies from `PyPI`_ using :ref:`Requirement Specifiers`
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: shell
-
-         python -m pip install SomePackage            # latest version
-         python -m pip install SomePackage==1.0.4     # specific version
-         python -m pip install 'SomePackage>=1.0.4'   # minimum version
-
-   .. tab:: Windows
-
-      .. code-block:: shell
-
-         py -m pip install SomePackage            # latest version
-         py -m pip install SomePackage==1.0.4     # specific version
-         py -m pip install 'SomePackage>=1.0.4'   # minimum version
-
-
-#. Install a list of requirements specified in a file.  See the :ref:`Requirements files `.
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: shell
-
-         python -m pip install -r requirements.txt
-
-   .. tab:: Windows
-
-      .. code-block:: shell
-
-         py -m pip install -r requirements.txt
-
-
-#. Upgrade an already installed ``SomePackage`` to the latest from PyPI.
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: shell
-
-         python -m pip install --upgrade SomePackage
-
-   .. tab:: Windows
-
-      .. code-block:: shell
-
-         py -m pip install --upgrade SomePackage
-
-
-#. Install a local project in "editable" mode. See the section on :ref:`Editable Installs `.
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: shell
-
-         python -m pip install -e .                # project in current directory
-         python -m pip install -e path/to/project  # project in another directory
-
-   .. tab:: Windows
-
-      .. code-block:: shell
-
-         py -m pip install -e .                 # project in current directory
-         py -m pip install -e path/to/project   # project in another directory
-
-
-#. Install a project from VCS
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: shell
-
-         python -m pip install SomeProject@git+https://git.repo/some_pkg.git@1.3.1
-
-   .. tab:: Windows
-
-      .. code-block:: shell
-
-         py -m pip install SomeProject@git+https://git.repo/some_pkg.git@1.3.1
-
-
-#. Install a project from VCS in "editable" mode. See the sections on :ref:`VCS Support ` and :ref:`Editable Installs `.
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: shell
-
-         python -m pip install -e git+https://git.repo/some_pkg.git#egg=SomePackage          # from git
-         python -m pip install -e hg+https://hg.repo/some_pkg.git#egg=SomePackage            # from mercurial
-         python -m pip install -e svn+svn://svn.repo/some_pkg/trunk/#egg=SomePackage         # from svn
-         python -m pip install -e git+https://git.repo/some_pkg.git@feature#egg=SomePackage  # from 'feature' branch
-         python -m pip install -e "git+https://git.repo/some_repo.git#egg=subdir&subdirectory=subdir_path" # install a python package from a repo subdirectory
-
-   .. tab:: Windows
-
-      .. code-block:: shell
-
-         py -m pip install -e git+https://git.repo/some_pkg.git#egg=SomePackage          # from git
-         py -m pip install -e hg+https://hg.repo/some_pkg.git#egg=SomePackage            # from mercurial
-         py -m pip install -e svn+svn://svn.repo/some_pkg/trunk/#egg=SomePackage         # from svn
-         py -m pip install -e git+https://git.repo/some_pkg.git@feature#egg=SomePackage  # from 'feature' branch
-         py -m pip install -e "git+https://git.repo/some_repo.git#egg=subdir&subdirectory=subdir_path" # install a python package from a repo subdirectory
-
-#. Install a package with `setuptools extras`_.
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: shell
-
-         python -m pip install SomePackage[PDF]
-         python -m pip install "SomePackage[PDF] @ git+https://git.repo/SomePackage@master#subdirectory=subdir_path"
-         python -m pip install .[PDF]  # project in current directory
-         python -m pip install SomePackage[PDF]==3.0
-         python -m pip install SomePackage[PDF,EPUB]  # multiple extras
-
-   .. tab:: Windows
-
-      .. code-block:: shell
-
-         py -m pip install SomePackage[PDF]
-         py -m pip install "SomePackage[PDF] @ git+https://git.repo/SomePackage@master#subdirectory=subdir_path"
-         py -m pip install .[PDF]  # project in current directory
-         py -m pip install SomePackage[PDF]==3.0
-         py -m pip install SomePackage[PDF,EPUB]  # multiple extras
-
-#. Install a particular source archive file.
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: shell
-
-         python -m pip install ./downloads/SomePackage-1.0.4.tar.gz
-         python -m pip install http://my.package.repo/SomePackage-1.0.4.zip
-
-   .. tab:: Windows
-
-      .. code-block:: shell
-
-         py -m pip install ./downloads/SomePackage-1.0.4.tar.gz
-         py -m pip install http://my.package.repo/SomePackage-1.0.4.zip
-
-#. Install a particular source archive file following :pep:`440` direct references.
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: shell
-
-         python -m pip install SomeProject@http://my.package.repo/SomeProject-1.2.3-py33-none-any.whl
-         python -m pip install "SomeProject @ http://my.package.repo/SomeProject-1.2.3-py33-none-any.whl"
-         python -m pip install SomeProject@http://my.package.repo/1.2.3.tar.gz
-
-   .. tab:: Windows
-
-      .. code-block:: shell
-
-         py -m pip install SomeProject@http://my.package.repo/SomeProject-1.2.3-py33-none-any.whl
-         py -m pip install "SomeProject @ http://my.package.repo/SomeProject-1.2.3-py33-none-any.whl"
-         py -m pip install SomeProject@http://my.package.repo/1.2.3.tar.gz
-
-#. Install from alternative package repositories.
-
-   Install from a different index, and not `PyPI`_
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: shell
-
-         python -m pip install --index-url http://my.package.repo/simple/ SomePackage
-
-   .. tab:: Windows
-
-      .. code-block:: shell
-
-         py -m pip install --index-url http://my.package.repo/simple/ SomePackage
-
-   Search an additional index during install, in addition to `PyPI`_
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: shell
-
-         python -m pip install --extra-index-url http://my.package.repo/simple SomePackage
-
-   .. tab:: Windows
-
-      .. code-block:: shell
-
-         py -m pip install --extra-index-url http://my.package.repo/simple SomePackage
-
-   Install from a local flat directory containing archives (and don't scan indexes):
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: shell
-
-         python -m pip install --no-index --find-links=file:///local/dir/ SomePackage
-         python -m pip install --no-index --find-links=/local/dir/ SomePackage
-         python -m pip install --no-index --find-links=relative/dir/ SomePackage
-
-   .. tab:: Windows
-
-      .. code-block:: shell
-
-         py -m pip install --no-index --find-links=file:///local/dir/ SomePackage
-         py -m pip install --no-index --find-links=/local/dir/ SomePackage
-         py -m pip install --no-index --find-links=relative/dir/ SomePackage
-
-
-#. Find pre-release and development versions, in addition to stable versions.  By default, pip only finds stable versions.
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: shell
-
-         python -m pip install --pre SomePackage
-
-   .. tab:: Windows
-
-      .. code-block:: shell
-
-         py -m pip install --pre SomePackage
-
-
-#. Install packages from source.
-
-   Do not use any binary packages
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: shell
-
-         python -m pip install SomePackage1 SomePackage2 --no-binary :all:
-
-   .. tab:: Windows
-
-      .. code-block:: shell
-
-         py -m pip install SomePackage1 SomePackage2 --no-binary :all:
-
-   Specify ``SomePackage1`` to be installed from source:
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: shell
-
-         python -m pip install SomePackage1 SomePackage2 --no-binary SomePackage1
-
-   .. tab:: Windows
-
-      .. code-block:: shell
-
-         py -m pip install SomePackage1 SomePackage2 --no-binary SomePackage1
-
-----
-
-.. [1] This is true with the exception that pip v7.0 and v7.0.1 required quotes
-       around specifiers containing environment markers in requirement files.
+You should be redirected automatically in 3 seconds. If that didn't
+work, here's a link: :doc:`../cli/pip_install`
diff --git a/docs/html/reference/pip_list.rst b/docs/html/reference/pip_list.rst
index 5119a804c..3768baf60 100644
--- a/docs/html/reference/pip_list.rst
+++ b/docs/html/reference/pip_list.rst
@@ -1,201 +1,11 @@
-.. _`pip list`:
+:orphan:
 
-========
-pip list
-========
+.. meta::
 
+  :http-equiv=refresh: 3; url=../../cli/pip_list/
 
+This page has moved
+===================
 
-Usage
-=====
-
-.. tab:: Unix/macOS
-
-   .. pip-command-usage:: list "python -m pip"
-
-.. tab:: Windows
-
-   .. pip-command-usage:: list "py -m pip"
-
-
-Description
-===========
-
-.. pip-command-description:: list
-
-
-Options
-=======
-
-.. pip-command-options:: list
-
-.. pip-index-options:: list
-
-
-Examples
-========
-
-#. List installed packages.
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: console
-
-         $ python -m pip list
-         docutils (0.10)
-         Jinja2 (2.7.2)
-         MarkupSafe (0.18)
-         Pygments (1.6)
-         Sphinx (1.2.1)
-
-   .. tab:: Windows
-
-      .. code-block:: console
-
-         C:\> py -m pip list
-         docutils (0.10)
-         Jinja2 (2.7.2)
-         MarkupSafe (0.18)
-         Pygments (1.6)
-         Sphinx (1.2.1)
-
-#. List outdated packages (excluding editables), and the latest version available.
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: console
-
-         $ python -m pip list --outdated
-         docutils (Current: 0.10 Latest: 0.11)
-         Sphinx (Current: 1.2.1 Latest: 1.2.2)
-
-   .. tab:: Windows
-
-      .. code-block:: console
-
-         C:\> py -m pip list --outdated
-         docutils (Current: 0.10 Latest: 0.11)
-         Sphinx (Current: 1.2.1 Latest: 1.2.2)
-
-#. List installed packages with column formatting.
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: console
-
-         $ python -m pip list --format columns
-         Package Version
-         ------- -------
-         docopt  0.6.2
-         idlex   1.13
-         jedi    0.9.0
-
-   .. tab:: Windows
-
-      .. code-block:: console
-
-         C:\> py -m pip list --format columns
-         Package Version
-         ------- -------
-         docopt  0.6.2
-         idlex   1.13
-         jedi    0.9.0
-
-#. List outdated packages with column formatting.
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: console
-
-         $ python -m pip list -o --format columns
-         Package    Version Latest Type
-         ---------- ------- ------ -----
-         retry      0.8.1   0.9.1  wheel
-         setuptools 20.6.7  21.0.0 wheel
-
-   .. tab:: Windows
-
-      .. code-block:: console
-
-         C:\> py -m pip list -o --format columns
-         Package    Version Latest Type
-         ---------- ------- ------ -----
-         retry      0.8.1   0.9.1  wheel
-         setuptools 20.6.7  21.0.0 wheel
-
-#. List packages that are not dependencies of other packages. Can be combined with
-   other options.
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: console
-
-         $ python -m pip list --outdated --not-required
-         docutils (Current: 0.10 Latest: 0.11)
-
-   .. tab:: Windows
-
-      .. code-block:: console
-
-         C:\> py -m pip list --outdated --not-required
-         docutils (Current: 0.10 Latest: 0.11)
-
-#. Use legacy formatting
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: console
-
-         $ python -m pip list --format=legacy
-         colorama (0.3.7)
-         docopt (0.6.2)
-         idlex (1.13)
-         jedi (0.9.0)
-
-   .. tab:: Windows
-
-      .. code-block:: console
-
-         C:\> py -m pip list --format=legacy
-         colorama (0.3.7)
-         docopt (0.6.2)
-         idlex (1.13)
-         jedi (0.9.0)
-
-#. Use json formatting
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: console
-
-         $ python -m pip list --format=json
-         [{'name': 'colorama', 'version': '0.3.7'}, {'name': 'docopt', 'version': '0.6.2'}, ...
-
-   .. tab:: Windows
-
-      .. code-block:: console
-
-         C:\> py -m pip list --format=json
-         [{'name': 'colorama', 'version': '0.3.7'}, {'name': 'docopt', 'version': '0.6.2'}, ...
-
-#. Use freeze formatting
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: console
-
-         $ python -m pip list --format=freeze
-         colorama==0.3.7
-         docopt==0.6.2
-         idlex==1.13
-         jedi==0.9.0
-
-   .. tab:: Windows
-
-      .. code-block:: console
-
-         C:\> py -m pip list --format=freeze
-         colorama==0.3.7
-         docopt==0.6.2
-         idlex==1.13
-         jedi==0.9.0
+You should be redirected automatically in 3 seconds. If that didn't
+work, here's a link: :doc:`../cli/pip_list`
diff --git a/docs/html/reference/pip_search.rst b/docs/html/reference/pip_search.rst
index 9905a1baf..0a7532ee7 100644
--- a/docs/html/reference/pip_search.rst
+++ b/docs/html/reference/pip_search.rst
@@ -1,52 +1,11 @@
-.. _`pip search`:
+:orphan:
 
-==========
-pip search
-==========
+.. meta::
 
+  :http-equiv=refresh: 3; url=../../cli/pip_search/
 
+This page has moved
+===================
 
-Usage
-=====
-
-.. tab:: Unix/macOS
-
-   .. pip-command-usage:: search "python -m pip"
-
-.. tab:: Windows
-
-   .. pip-command-usage:: search "py -m pip"
-
-
-Description
-===========
-
-.. pip-command-description:: search
-
-
-Options
-=======
-
-.. pip-command-options:: search
-
-
-Examples
-========
-
-#. Search for "peppercorn"
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: console
-
-         $ python -m pip search peppercorn
-         pepperedform    - Helpers for using peppercorn with formprocess.
-         peppercorn      - A library for converting a token stream into [...]
-
-   .. tab:: Windows
-
-      .. code-block:: console
-
-         C:\> py -m pip search peppercorn
-         pepperedform    - Helpers for using peppercorn with formprocess.
-         peppercorn      - A library for converting a token stream into [...]
+You should be redirected automatically in 3 seconds. If that didn't
+work, here's a link: :doc:`../cli/pip_search`
diff --git a/docs/html/reference/pip_show.rst b/docs/html/reference/pip_show.rst
index b603f786f..b2ce3c7d8 100644
--- a/docs/html/reference/pip_show.rst
+++ b/docs/html/reference/pip_show.rst
@@ -1,154 +1,11 @@
-.. _`pip show`:
+:orphan:
 
-========
-pip show
-========
+.. meta::
 
+  :http-equiv=refresh: 3; url=../../cli/pip_show/
 
+This page has moved
+===================
 
-Usage
-=====
-
-.. tab:: Unix/macOS
-
-   .. pip-command-usage:: show "python -m pip"
-
-.. tab:: Windows
-
-   .. pip-command-usage:: show "py -m pip"
-
-
-Description
-===========
-
-.. pip-command-description:: show
-
-
-Options
-=======
-
-.. pip-command-options:: show
-
-
-Examples
-========
-
-#. Show information about a package:
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: console
-
-         $ python -m pip show sphinx
-         Name: Sphinx
-         Version: 1.4.5
-         Summary: Python documentation generator
-         Home-page: http://sphinx-doc.org/
-         Author: Georg Brandl
-         Author-email: georg@python.org
-         License: BSD
-         Location: /my/env/lib/python2.7/site-packages
-         Requires: docutils, snowballstemmer, alabaster, Pygments, imagesize, Jinja2, babel, six
-
-   .. tab:: Windows
-
-      .. code-block:: console
-
-         C:\> py -m pip show sphinx
-         Name: Sphinx
-         Version: 1.4.5
-         Summary: Python documentation generator
-         Home-page: http://sphinx-doc.org/
-         Author: Georg Brandl
-         Author-email: georg@python.org
-         License: BSD
-         Location: /my/env/lib/python2.7/site-packages
-         Requires: docutils, snowballstemmer, alabaster, Pygments, imagesize, Jinja2, babel, six
-
-#. Show all information about a package
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: console
-
-         $ python -m pip show --verbose sphinx
-         Name: Sphinx
-         Version: 1.4.5
-         Summary: Python documentation generator
-         Home-page: http://sphinx-doc.org/
-         Author: Georg Brandl
-         Author-email: georg@python.org
-         License: BSD
-         Location: /my/env/lib/python2.7/site-packages
-         Requires: docutils, snowballstemmer, alabaster, Pygments, imagesize, Jinja2, babel, six
-         Metadata-Version: 2.0
-         Installer:
-         Classifiers:
-            Development Status :: 5 - Production/Stable
-            Environment :: Console
-            Environment :: Web Environment
-            Intended Audience :: Developers
-            Intended Audience :: Education
-            License :: OSI Approved :: BSD License
-            Operating System :: OS Independent
-            Programming Language :: Python
-            Programming Language :: Python :: 2
-            Programming Language :: Python :: 3
-            Framework :: Sphinx
-            Framework :: Sphinx :: Extension
-            Framework :: Sphinx :: Theme
-            Topic :: Documentation
-            Topic :: Documentation :: Sphinx
-            Topic :: Text Processing
-            Topic :: Utilities
-         Entry-points:
-            [console_scripts]
-            sphinx-apidoc = sphinx.apidoc:main
-            sphinx-autogen = sphinx.ext.autosummary.generate:main
-            sphinx-build = sphinx:main
-            sphinx-quickstart = sphinx.quickstart:main
-            [distutils.commands]
-            build_sphinx = sphinx.setup_command:BuildDoc
-
-   .. tab:: Windows
-
-      .. code-block:: console
-
-         C:\> py -m pip show --verbose sphinx
-         Name: Sphinx
-         Version: 1.4.5
-         Summary: Python documentation generator
-         Home-page: http://sphinx-doc.org/
-         Author: Georg Brandl
-         Author-email: georg@python.org
-         License: BSD
-         Location: /my/env/lib/python2.7/site-packages
-         Requires: docutils, snowballstemmer, alabaster, Pygments, imagesize, Jinja2, babel, six
-         Metadata-Version: 2.0
-         Installer:
-         Classifiers:
-            Development Status :: 5 - Production/Stable
-            Environment :: Console
-            Environment :: Web Environment
-            Intended Audience :: Developers
-            Intended Audience :: Education
-            License :: OSI Approved :: BSD License
-            Operating System :: OS Independent
-            Programming Language :: Python
-            Programming Language :: Python :: 2
-            Programming Language :: Python :: 3
-            Framework :: Sphinx
-            Framework :: Sphinx :: Extension
-            Framework :: Sphinx :: Theme
-            Topic :: Documentation
-            Topic :: Documentation :: Sphinx
-            Topic :: Text Processing
-            Topic :: Utilities
-         Entry-points:
-            [console_scripts]
-            sphinx-apidoc = sphinx.apidoc:main
-            sphinx-autogen = sphinx.ext.autosummary.generate:main
-            sphinx-build = sphinx:main
-            sphinx-quickstart = sphinx.quickstart:main
-            [distutils.commands]
-            build_sphinx = sphinx.setup_command:BuildDoc
+You should be redirected automatically in 3 seconds. If that didn't
+work, here's a link: :doc:`../cli/pip_show`
diff --git a/docs/html/reference/pip_uninstall.rst b/docs/html/reference/pip_uninstall.rst
index e6eeb5ebf..db84476c8 100644
--- a/docs/html/reference/pip_uninstall.rst
+++ b/docs/html/reference/pip_uninstall.rst
@@ -1,58 +1,11 @@
-.. _`pip uninstall`:
+:orphan:
 
-=============
-pip uninstall
-=============
+.. meta::
 
+  :http-equiv=refresh: 3; url=../../cli/pip_uninstall/
 
+This page has moved
+===================
 
-Usage
-=====
-
-.. tab:: Unix/macOS
-
-   .. pip-command-usage:: uninstall "python -m pip"
-
-.. tab:: Windows
-
-   .. pip-command-usage:: uninstall "py -m pip"
-
-
-Description
-===========
-
-.. pip-command-description:: uninstall
-
-
-Options
-=======
-
-.. pip-command-options:: uninstall
-
-
-Examples
-========
-
-#. Uninstall a package.
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: console
-
-         $ python -m pip uninstall simplejson
-         Uninstalling simplejson:
-            /home/me/env/lib/python3.9/site-packages/simplejson
-            /home/me/env/lib/python3.9/site-packages/simplejson-2.2.1-py3.9.egg-info
-         Proceed (y/n)? y
-            Successfully uninstalled simplejson
-
-   .. tab:: Windows
-
-      .. code-block:: console
-
-         C:\> py -m pip uninstall simplejson
-         Uninstalling simplejson:
-            /home/me/env/lib/python3.9/site-packages/simplejson
-            /home/me/env/lib/python3.9/site-packages/simplejson-2.2.1-py3.9.egg-info
-         Proceed (y/n)? y
-            Successfully uninstalled simplejson
+You should be redirected automatically in 3 seconds. If that didn't
+work, here's a link: :doc:`../cli/pip_uninstall`
diff --git a/docs/html/reference/pip_wheel.rst b/docs/html/reference/pip_wheel.rst
index c2a9543fc..06861f607 100644
--- a/docs/html/reference/pip_wheel.rst
+++ b/docs/html/reference/pip_wheel.rst
@@ -1,125 +1,11 @@
+:orphan:
 
-.. _`pip wheel`:
+.. meta::
 
-=========
-pip wheel
-=========
+  :http-equiv=refresh: 3; url=../../cli/pip_wheel/
 
+This page has moved
+===================
 
-
-Usage
-=====
-
-.. tab:: Unix/macOS
-
-   .. pip-command-usage:: wheel "python -m pip"
-
-.. tab:: Windows
-
-   .. pip-command-usage:: wheel "py -m pip"
-
-
-Description
-===========
-
-.. pip-command-description:: wheel
-
-
-Build System Interface
-----------------------
-
-In order for pip to build a wheel, ``setup.py`` must implement the
-``bdist_wheel`` command with the following syntax:
-
-.. tab:: Unix/macOS
-
-   .. code-block:: shell
-
-      python setup.py bdist_wheel -d TARGET
-
-.. tab:: Windows
-
-   .. code-block:: shell
-
-      py setup.py bdist_wheel -d TARGET
-
-
-This command must create a wheel compatible with the invoking Python
-interpreter, and save that wheel in the directory TARGET.
-
-No other build system commands are invoked by the ``pip wheel`` command.
-
-Customising the build
-^^^^^^^^^^^^^^^^^^^^^
-
-It is possible using ``--global-option`` to include additional build commands
-with their arguments in the ``setup.py`` command. This is currently the only
-way to influence the building of C extensions from the command line. For
-example:
-
-.. tab:: Unix/macOS
-
-   .. code-block:: shell
-
-      python -m pip wheel --global-option bdist_ext --global-option -DFOO wheel
-
-.. tab:: Windows
-
-   .. code-block:: shell
-
-      py -m pip wheel --global-option bdist_ext --global-option -DFOO wheel
-
-
-will result in a build command of
-
-::
-
-    setup.py bdist_ext -DFOO bdist_wheel -d TARGET
-
-which passes a preprocessor symbol to the extension build.
-
-Such usage is considered highly build-system specific and more an accident of
-the current implementation than a supported interface.
-
-
-
-Options
-=======
-
-.. pip-command-options:: wheel
-
-.. pip-index-options:: wheel
-
-
-Examples
-========
-
-#. Build wheels for a requirement (and all its dependencies), and then install
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: shell
-
-         python -m pip wheel --wheel-dir=/tmp/wheelhouse SomePackage
-         python -m pip install --no-index --find-links=/tmp/wheelhouse SomePackage
-
-   .. tab:: Windows
-
-      .. code-block:: shell
-
-         py -m pip wheel --wheel-dir=/tmp/wheelhouse SomePackage
-         py -m pip install --no-index --find-links=/tmp/wheelhouse SomePackage
-
-#. Build a wheel for a package from source
-
-   .. tab:: Unix/macOS
-
-      .. code-block:: shell
-
-         python -m pip wheel --no-binary SomePackage SomePackage
-
-   .. tab:: Windows
-
-      .. code-block:: shell
-
-         py -m pip wheel --no-binary SomePackage SomePackage
+You should be redirected automatically in 3 seconds. If that didn't
+work, here's a link: :doc:`../cli/pip_wheel`
diff --git a/docs/html/user_guide.rst b/docs/html/user_guide.rst
index 92887885b..10e372b19 100644
--- a/docs/html/user_guide.rst
+++ b/docs/html/user_guide.rst
@@ -125,7 +125,7 @@ does not come with it included.
 
    pip install keyring
    echo your-password | keyring set pypi.company.com your-username
-   pip install your-package --extra-index-url https://pypi.company.com/
+   pip install your-package --index-url https://pypi.company.com/
 
 .. _keyring: https://pypi.org/project/keyring/
 
@@ -825,6 +825,21 @@ strategies supported:
 The default strategy is ``only-if-needed``. This was changed in pip 10.0 due to
 the breaking nature of ``eager`` when upgrading conflicting dependencies.
 
+It is important to note that ``--upgrade`` affects *direct requirements* (e.g.
+those specified on the command-line or via a requirements file) while
+``--upgrade-strategy`` affects *indirect requirements* (dependencies of direct
+requirements).
+
+As an example, say ``SomePackage`` has a dependency, ``SomeDependency``, and
+both of them are already installed but are not the latest available versions:
+
+- ``pip install SomePackage``: will not upgrade the existing ``SomePackage`` or
+  ``SomeDependency``.
+- ``pip install --upgrade SomePackage``: will upgrade ``SomePackage``, but not
+  ``SomeDependency`` (unless a minimum requirement is not met).
+- ``pip install --upgrade SomePackage --upgrade-strategy=eager``: upgrades both
+  ``SomePackage`` and ``SomeDependency``.
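+
+A minimal console sketch of the eager case (``SomePackage`` is a placeholder
+project name):
+
+.. code-block:: console
+
+   $ python -m pip install --upgrade --upgrade-strategy=eager SomePackage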
+
 As an historic note, an earlier "fix" for getting the ``only-if-needed``
 behaviour was:
 
@@ -1857,9 +1872,11 @@ We plan for the resolver changeover to proceed as follows, using
      environments, pip defaults to the old resolver, and the new one is
      available using the flag ``--use-feature=2020-resolver``.
 
-*    pip 21.0: pip uses new resolver, and the old resolver is no longer
-     available. Python 2 support is removed per our :ref:`Python 2
-     Support` policy.
+*    pip 21.0: pip uses the new resolver by default, and the old resolver
+     is no longer supported. It will be removed after a currently undecided
+     amount of time, as the removal is dependent on pip's volunteer
+     maintainers' availability. Python 2 support is removed per our
+     :ref:`Python 2 Support` policy.
 
 Since this work will not change user-visible behavior described in the
 pip documentation, this change is not covered by the :ref:`Deprecation
@@ -1885,6 +1902,6 @@ announcements on the `low-traffic packaging announcements list`_ and
 .. _low-traffic packaging announcements list: https://mail.python.org/mailman3/lists/pypi-announce.python.org/
 .. _our survey on upgrades that create conflicts: https://docs.google.com/forms/d/e/1FAIpQLSeBkbhuIlSofXqCyhi3kGkLmtrpPOEBwr6iJA6SzHdxWKfqdA/viewform
 .. _the official Python blog: https://blog.python.org/
-.. _requests: https://requests.readthedocs.io/en/master/user/authentication/#netrc-authentication
+.. _requests: https://requests.readthedocs.io/en/latest/user/authentication/#netrc-authentication
 .. _Python standard library: https://docs.python.org/3/library/netrc.html
 .. _Python Windows launcher: https://docs.python.org/3/using/windows.html#launcher
diff --git a/docs/pip_sphinxext.py b/docs/pip_sphinxext.py
index df4390d81..c59902886 100644
--- a/docs/pip_sphinxext.py
+++ b/docs/pip_sphinxext.py
@@ -1,32 +1,91 @@
 """pip sphinx extensions"""
 
 import optparse
+import pathlib
+import re
 import sys
 from textwrap import dedent
+from typing import Iterable, List, Optional
 
-from docutils import nodes
+from docutils import nodes, statemachine
 from docutils.parsers import rst
-from docutils.statemachine import ViewList
+from docutils.statemachine import StringList, ViewList
+from sphinx.application import Sphinx
 
 from pip._internal.cli import cmdoptions
 from pip._internal.commands import commands_dict, create_command
 from pip._internal.req.req_file import SUPPORTED_OPTIONS
 
 
+class PipNewsInclude(rst.Directive):
+    required_arguments = 1
+
+    def _is_version_section_title_underline(self, prev, curr):
+        """Find a ==== line that marks the version section title."""
+        if prev is None:
+            return False
+        if re.match(r"^=+$", curr) is None:
+            return False
+        if len(curr) < len(prev):
+            return False
+        return True
+
+    def _iter_lines_with_refs(self, lines):
+        """Transform the input lines to add a ref before each section title.
+
+        This is done by looking one line ahead to locate a title's underline,
+        and adding a ref before the title text.
+
+        Dots in the version are converted into dashes, and a ``v`` is
+        prefixed. This makes Sphinx use them verbatim as the HTML ``id``,
+        without generating auto numbering (which would make the anchors
+        unstable).
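+
+        For example, a section heading whose first token is ``21.1`` gets a
+        ``.. _`v21-1`:`` line inserted just above it (the version number here
+        is purely illustrative).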
+        """
+        prev = None
+        for line in lines:
+            # Transform the previous line to include an explicit ref.
+            if self._is_version_section_title_underline(prev, line):
+                vref = prev.split(None, 1)[0].replace(".", "-")
+                yield f".. _`v{vref}`:"
+                yield ""  # Empty line between ref and the title.
+            if prev is not None:
+                yield prev
+            prev = line
+        if prev is not None:
+            yield prev
+
+    def run(self):
+        source = self.state_machine.input_lines.source(
+            self.lineno - self.state_machine.input_offset - 1,
+        )
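+        # ``source`` is the path of the document using the directive; the
+        # directive argument is resolved relative to that document's directory.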
+        path = (
+            pathlib.Path(source)
+            .resolve()
+            .parent
+            .joinpath(self.arguments[0])
+            .resolve()
+        )
+        include_lines = statemachine.string2lines(
+            path.read_text(encoding="utf-8"),
+            self.state.document.settings.tab_width,
+            convert_whitespace=True,
+        )
+        include_lines = list(self._iter_lines_with_refs(include_lines))
+        self.state_machine.insert_input(include_lines, str(path))
+        return []
+
+
 class PipCommandUsage(rst.Directive):
     required_arguments = 1
     optional_arguments = 3
 
-    def run(self):
+    def run(self) -> List[nodes.Node]:
         cmd = create_command(self.arguments[0])
-        cmd_prefix = 'python -m pip'
+        cmd_prefix = "python -m pip"
         if len(self.arguments) > 1:
             cmd_prefix = " ".join(self.arguments[1:])
             cmd_prefix = cmd_prefix.strip('"')
             cmd_prefix = cmd_prefix.strip("'")
-        usage = dedent(
-            cmd.usage.replace('%prog', f'{cmd_prefix} {cmd.name}')
-        ).strip()
+        usage = dedent(cmd.usage.replace("%prog", f"{cmd_prefix} {cmd.name}")).strip()
         node = nodes.literal_block(usage, usage)
         return [node]
 
@@ -34,26 +93,28 @@ class PipCommandUsage(rst.Directive):
 class PipCommandDescription(rst.Directive):
     required_arguments = 1
 
-    def run(self):
+    def run(self) -> List[nodes.Node]:
         node = nodes.paragraph()
         node.document = self.state.document
         desc = ViewList()
         cmd = create_command(self.arguments[0])
+        assert cmd.__doc__ is not None
         description = dedent(cmd.__doc__)
-        for line in description.split('\n'):
+        for line in description.split("\n"):
             desc.append(line, "")
         self.state.nested_parse(desc, 0, node)
         return [node]
 
 
 class PipOptions(rst.Directive):
-
-    def _format_option(self, option, cmd_name=None):
+    def _format_option(
+        self, option: optparse.Option, cmd_name: Optional[str] = None
+    ) -> List[str]:
         bookmark_line = (
-            ".. _`{cmd_name}_{option._long_opts[0]}`:"
-            if cmd_name else
-            ".. _`{option._long_opts[0]}`:"
-        ).format(**locals())
+            f".. _`{cmd_name}_{option._long_opts[0]}`:"
+            if cmd_name
+            else f".. _`{option._long_opts[0]}`:"
+        )
         line = ".. option:: "
         if option._short_opts:
             line += option._short_opts[0]
@@ -62,22 +123,27 @@ class PipOptions(rst.Directive):
         elif option._long_opts:
             line += option._long_opts[0]
         if option.takes_value():
-            metavar = option.metavar or option.dest.lower()
+            metavar = option.metavar or option.dest
+            assert metavar is not None
             line += f" <{metavar.lower()}>"
         # fix defaults
-        opt_help = option.help.replace('%default', str(option.default))
+        assert option.help is not None
+        # https://github.com/python/typeshed/pull/5080
+        opt_help = option.help.replace("%default", str(option.default))  # type: ignore
         # fix paths with sys.prefix
         opt_help = opt_help.replace(sys.prefix, "")
         return [bookmark_line, "", line, "", "    " + opt_help, ""]
 
-    def _format_options(self, options, cmd_name=None):
+    def _format_options(
+        self, options: Iterable[optparse.Option], cmd_name: Optional[str] = None
+    ) -> None:
         for option in options:
             if option.help == optparse.SUPPRESS_HELP:
                 continue
             for line in self._format_option(option, cmd_name):
                 self.view_list.append(line, "")
 
-    def run(self):
+    def run(self) -> List[nodes.Node]:
         node = nodes.paragraph()
         node.document = self.state.document
         self.view_list = ViewList()
@@ -87,19 +153,17 @@ class PipOptions(rst.Directive):
 
 
 class PipGeneralOptions(PipOptions):
-    def process_options(self):
-        self._format_options(
-            [o() for o in cmdoptions.general_group['options']]
-        )
+    def process_options(self) -> None:
+        self._format_options([o() for o in cmdoptions.general_group["options"]])
 
 
 class PipIndexOptions(PipOptions):
     required_arguments = 1
 
-    def process_options(self):
+    def process_options(self) -> None:
         cmd_name = self.arguments[0]
         self._format_options(
-            [o() for o in cmdoptions.index_group['options']],
+            [o() for o in cmdoptions.index_group["options"]],
             cmd_name=cmd_name,
         )
 
@@ -107,7 +171,7 @@ class PipIndexOptions(PipOptions):
 class PipCommandOptions(PipOptions):
     required_arguments = 1
 
-    def process_options(self):
+    def process_options(self) -> None:
         cmd = create_command(self.arguments[0])
         self._format_options(
             cmd.parser.option_groups[0].option_list,
@@ -116,49 +180,128 @@ class PipCommandOptions(PipOptions):
 
 
 class PipReqFileOptionsReference(PipOptions):
-
-    def determine_opt_prefix(self, opt_name):
+    def determine_opt_prefix(self, opt_name: str) -> str:
         for command in commands_dict:
             cmd = create_command(command)
             if cmd.cmd_opts.has_option(opt_name):
                 return command
 
-        raise KeyError(f'Could not identify prefix of opt {opt_name}')
+        raise KeyError(f"Could not identify prefix of opt {opt_name}")
 
-    def process_options(self):
+    def process_options(self) -> None:
         for option in SUPPORTED_OPTIONS:
-            if getattr(option, 'deprecated', False):
+            if getattr(option, "deprecated", False):
                 continue
 
             opt = option()
             opt_name = opt._long_opts[0]
             if opt._short_opts:
-                short_opt_name = '{}, '.format(opt._short_opts[0])
+                short_opt_name = "{}, ".format(opt._short_opts[0])
             else:
-                short_opt_name = ''
+                short_opt_name = ""
 
-            if option in cmdoptions.general_group['options']:
-                prefix = ''
+            if option in cmdoptions.general_group["options"]:
+                prefix = ""
             else:
-                prefix = '{}_'.format(self.determine_opt_prefix(opt_name))
+                prefix = "{}_".format(self.determine_opt_prefix(opt_name))
 
             self.view_list.append(
-                '*  :ref:`{short}{long}<{prefix}{opt_name}>`'.format(
+                "*  :ref:`{short}{long}<{prefix}{opt_name}>`".format(
                     short=short_opt_name,
                     long=opt_name,
                     prefix=prefix,
-                    opt_name=opt_name
+                    opt_name=opt_name,
                 ),
-                "\n"
+                "\n",
             )
 
 
-def setup(app):
-    app.add_directive('pip-command-usage', PipCommandUsage)
-    app.add_directive('pip-command-description', PipCommandDescription)
-    app.add_directive('pip-command-options', PipCommandOptions)
-    app.add_directive('pip-general-options', PipGeneralOptions)
-    app.add_directive('pip-index-options', PipIndexOptions)
+class PipCLIDirective(rst.Directive):
+    """
+    - Only works when used in a MyST document.
+    - Requires sphinx-inline-tabs' tab directive.
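+
+    A hypothetical MyST usage sketch (the package name is illustrative)::
+
+        ```{pip-cli}
+        $ pip install SomePackage
+        ```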
+    """
+
+    has_content = True
+    optional_arguments = 1
+
+    def run(self) -> List[nodes.Node]:
+        node = nodes.paragraph()
+        node.document = self.state.document
+
+        os_variants = {
+            "Linux": {
+                "highlighter": "console",
+                "executable": "python",
+                "prompt": "$",
+            },
+            "MacOS": {
+                "highlighter": "console",
+                "executable": "python",
+                "prompt": "$",
+            },
+            "Windows": {
+                "highlighter": "doscon",
+                "executable": "py",
+                "prompt": "C:>",
+            },
+        }
+
+        if self.arguments:
+            assert self.arguments == ["in-a-venv"]
+            in_virtual_environment = True
+        else:
+            in_virtual_environment = False
+
+        lines = []
+        # Create a tab for each OS
+        for os, variant in os_variants.items():
+
+            # Unpack the values
+            prompt = variant["prompt"]
+            highlighter = variant["highlighter"]
+            if in_virtual_environment:
+                executable = "python"
+                pip_spelling = "pip"
+            else:
+                executable = variant["executable"]
+                pip_spelling = f"{executable} -m pip"
+
+            # Substitute the various "prompts" into the correct variants
+            substitution_pipeline = [
+                (
+                    r"(^|(?<=\n))\$ python",
+                    f"{prompt} {executable}",
+                ),
+                (
+                    r"(^|(?<=\n))\$ pip",
+                    f"{prompt} {pip_spelling}",
+                ),
+            ]
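+            # e.g. a content line "$ pip install SomePackage" (an illustrative
+            # command) becomes "C:> py -m pip install SomePackage" in the
+            # Windows tab outside a virtual environment.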
+            content = self.block_text
+            for pattern, substitution in substitution_pipeline:
+                content = re.sub(pattern, substitution, content)
+
+            # Write the tab
+            lines.append(f"````{{tab}} {os}")
+            lines.append(f"```{highlighter}")
+            lines.append(f"{content}")
+            lines.append("```")
+            lines.append("````")
+
+        string_list = StringList(lines)
+        self.state.nested_parse(string_list, 0, node)
+        return [node]
+
+
+def setup(app: Sphinx) -> None:
+    app.add_directive("pip-command-usage", PipCommandUsage)
+    app.add_directive("pip-command-description", PipCommandDescription)
+    app.add_directive("pip-command-options", PipCommandOptions)
+    app.add_directive("pip-general-options", PipGeneralOptions)
+    app.add_directive("pip-index-options", PipIndexOptions)
     app.add_directive(
-        'pip-requirements-file-options-ref-list', PipReqFileOptionsReference
+        "pip-requirements-file-options-ref-list", PipReqFileOptionsReference
     )
+    app.add_directive('pip-news-include', PipNewsInclude)
+    app.add_directive("pip-cli", PipCLIDirective)
diff --git a/news/0a741827-049c-4d5d-b44d-daea0c2fd01a.trivial.rst b/news/0a741827-049c-4d5d-b44d-daea0c2fd01a.trivial.rst
new file mode 100644
index 000000000..e69de29bb
diff --git a/news/11e1b2eb-6433-4f15-b70d-c2c514f72ebd.trivial.rst b/news/11e1b2eb-6433-4f15-b70d-c2c514f72ebd.trivial.rst
new file mode 100644
index 000000000..e69de29bb
diff --git a/news/151a1e46-d005-46ca-b1ae-a3811357dba3.trivial.rst b/news/151a1e46-d005-46ca-b1ae-a3811357dba3.trivial.rst
new file mode 100644
index 000000000..e69de29bb
diff --git a/news/1ab8f1c8-c115-4055-9a60-30a8f8eef7ba.trivial.rst b/news/1ab8f1c8-c115-4055-9a60-30a8f8eef7ba.trivial.rst
new file mode 100644
index 000000000..e69de29bb
diff --git a/news/40711960-12d9-4e58-8322-21e5975a804e.trivial.rst b/news/40711960-12d9-4e58-8322-21e5975a804e.trivial.rst
new file mode 100644
index 000000000..e69de29bb
diff --git a/news/4390.bugfix.rst b/news/4390.bugfix.rst
new file mode 100644
index 000000000..0d84de5cf
--- /dev/null
+++ b/news/4390.bugfix.rst
@@ -0,0 +1 @@
+Fixed ``--target`` to work with ``--editable`` installs.
diff --git a/news/4822829F-6A45-4202-87BA-A80482DF6D4E.doc.rst b/news/4822829F-6A45-4202-87BA-A80482DF6D4E.doc.rst
new file mode 100644
index 000000000..6c3ef3653
--- /dev/null
+++ b/news/4822829F-6A45-4202-87BA-A80482DF6D4E.doc.rst
@@ -0,0 +1,2 @@
+Update "setuptools extras" link to match upstream.
+
diff --git a/news/5be04056-e1d6-4f9a-bf46-8938d1936d9e.trivial.rst b/news/5be04056-e1d6-4f9a-bf46-8938d1936d9e.trivial.rst
new file mode 100644
index 000000000..e69de29bb
diff --git a/news/6409.bugfix.rst b/news/6409.bugfix.rst
new file mode 100644
index 000000000..e906c15fa
--- /dev/null
+++ b/news/6409.bugfix.rst
@@ -0,0 +1 @@
+Add a warning discouraging the use of pip as root outside a virtual environment.
diff --git a/news/6720.doc.rst b/news/6720.doc.rst
new file mode 100644
index 000000000..f5547dfe5
--- /dev/null
+++ b/news/6720.doc.rst
@@ -0,0 +1 @@
+Improve SSL Certificate Verification docs and ``--cert`` help text.
diff --git a/news/7269.bugfix.rst b/news/7269.bugfix.rst
new file mode 100644
index 000000000..46816692b
--- /dev/null
+++ b/news/7269.bugfix.rst
@@ -0,0 +1,2 @@
+Ignore ``.dist-info`` directories if the stem is not a valid Python distribution
+name, so they don't show up in e.g. ``pip freeze``.
diff --git a/news/76c758fb-6f07-4ec1-956b-d77c9f339773.trivial.rst b/news/76c758fb-6f07-4ec1-956b-d77c9f339773.trivial.rst
new file mode 100644
index 000000000..e69de29bb
diff --git a/news/8418.bugfix.rst b/news/8418.bugfix.rst
new file mode 100644
index 000000000..1bcc9b787
--- /dev/null
+++ b/news/8418.bugfix.rst
@@ -0,0 +1 @@
+Fix the ``pip freeze`` permission denied error to display an understandable error message and offer solutions.
diff --git a/news/8418.doc.rst b/news/8418.doc.rst
new file mode 100644
index 000000000..6634f6cd6
--- /dev/null
+++ b/news/8418.doc.rst
@@ -0,0 +1 @@
+Add a section in the documentation to suggest solutions to the ``pip freeze`` permission denied issue.
diff --git a/news/855bfaed-4341-4d28-ab9e-e5ab43fb039f.trivial.rst b/news/855bfaed-4341-4d28-ab9e-e5ab43fb039f.trivial.rst
new file mode 100644
index 000000000..e69de29bb
diff --git a/news/8597c433-9c0b-4bec-a1e8-afd31786eaeb.trivial.rst b/news/8597c433-9c0b-4bec-a1e8-afd31786eaeb.trivial.rst
new file mode 100644
index 000000000..e69de29bb
diff --git a/news/8733.bugfix.rst b/news/8733.bugfix.rst
new file mode 100644
index 000000000..95fd67539
--- /dev/null
+++ b/news/8733.bugfix.rst
@@ -0,0 +1 @@
+Correctly uninstall script files (from setuptools' ``scripts`` argument) when installed with ``--user``.
diff --git a/news/9091.feature.rst b/news/9091.feature.rst
new file mode 100644
index 000000000..8147e79c5
--- /dev/null
+++ b/news/9091.feature.rst
@@ -0,0 +1,4 @@
+Add a feature ``--use-feature=in-tree-build`` to build local projects in-place
+when installing. This is expected to become the default behavior in pip 21.3;
+see `Installing from local packages `_
+for more information.
diff --git a/news/9139.feature.rst b/news/9139.feature.rst
new file mode 100644
index 000000000..98dc133a1
--- /dev/null
+++ b/news/9139.feature.rst
@@ -0,0 +1 @@
+Bring back the "(from versions: ...)" message that was shown on resolution failures.
diff --git a/news/917ab6ff-72ea-4db5-846a-30273dac1c0c.trivial.rst b/news/917ab6ff-72ea-4db5-846a-30273dac1c0c.trivial.rst
new file mode 100644
index 000000000..e69de29bb
diff --git a/news/9300.bugfix.rst b/news/9300.bugfix.rst
new file mode 100644
index 000000000..7da27f997
--- /dev/null
+++ b/news/9300.bugfix.rst
@@ -0,0 +1,2 @@
+New resolver: Show relevant entries from user-supplied constraint files in the
+error message to improve debuggability.
diff --git a/news/9348.bugfix.rst b/news/9348.bugfix.rst
new file mode 100644
index 000000000..99e673954
--- /dev/null
+++ b/news/9348.bugfix.rst
@@ -0,0 +1,2 @@
+Avoid parsing the version to make the version check more robust against
+sloppily debundled downstream distributions.
diff --git a/news/9541.bugfix.rst b/news/9541.bugfix.rst
new file mode 100644
index 000000000..88180198c
--- /dev/null
+++ b/news/9541.bugfix.rst
@@ -0,0 +1 @@
+Fix incorrect reporting on ``Requires-Python`` conflicts.
diff --git a/news/9547.feature.rst b/news/9547.feature.rst
new file mode 100644
index 000000000..364a8f688
--- /dev/null
+++ b/news/9547.feature.rst
@@ -0,0 +1 @@
+Add support for editable installs for projects with only ``setup.cfg`` files.
diff --git a/news/9565.bugfix.rst b/news/9565.bugfix.rst
new file mode 100644
index 000000000..b8f95fd35
--- /dev/null
+++ b/news/9565.bugfix.rst
@@ -0,0 +1 @@
+Make wheel compatibility tag preferences more important than the build tag.
diff --git a/news/9617.process.rst b/news/9617.process.rst
new file mode 100644
index 000000000..f505c4605
--- /dev/null
+++ b/news/9617.process.rst
@@ -0,0 +1,3 @@
+Start installation scheme migration from ``distutils`` to ``sysconfig``. A
+warning is emitted when the two implementations differ, to encourage user
+reports so we can avoid breakages before they happen.
diff --git a/news/9647.doc.rst b/news/9647.doc.rst
new file mode 100644
index 000000000..709178176
--- /dev/null
+++ b/news/9647.doc.rst
@@ -0,0 +1 @@
+Add a warning about ``--extra-index-url`` and dependency confusion.
diff --git a/news/9692.doc.rst b/news/9692.doc.rst
new file mode 100644
index 000000000..2ef962370
--- /dev/null
+++ b/news/9692.doc.rst
@@ -0,0 +1,2 @@
+Describe ``--upgrade-strategy`` and direct requirements explicitly; add a brief
+example.
diff --git a/news/9748.feature.rst b/news/9748.feature.rst
new file mode 100644
index 000000000..cb4a1cded
--- /dev/null
+++ b/news/9748.feature.rst
@@ -0,0 +1 @@
+Improve performance when picking the best file from indexes during ``pip install``.
diff --git a/news/9e768673-6079-491e-bbe0-d1593952f1c7.trivial.rst b/news/9e768673-6079-491e-bbe0-d1593952f1c7.trivial.rst
new file mode 100644
index 000000000..e69de29bb
diff --git a/news/CVE-2021-28363.vendor.rst b/news/CVE-2021-28363.vendor.rst
new file mode 100644
index 000000000..29700ab74
--- /dev/null
+++ b/news/CVE-2021-28363.vendor.rst
@@ -0,0 +1 @@
+Update urllib3 to 1.26.4 to fix CVE-2021-28363.
diff --git a/news/dfaa54d4-21e2-460f-9d80-455ff318c713.trivial.rst b/news/dfaa54d4-21e2-460f-9d80-455ff318c713.trivial.rst
new file mode 100644
index 000000000..e69de29bb
diff --git a/news/f24d8f47-5750-4a13-b36f-d4a4622861cf.trivial.rst b/news/f24d8f47-5750-4a13-b36f-d4a4622861cf.trivial.rst
new file mode 100644
index 000000000..e69de29bb
diff --git a/news/fc6b6951-9a1a-453e-af98-bbb35f7c3e66.trivial.rst b/news/fc6b6951-9a1a-453e-af98-bbb35f7c3e66.trivial.rst
new file mode 100644
index 000000000..e69de29bb
diff --git a/news/fd62a11c-018c-4fde-ac8d-f674c6d9d190.trivial.rst b/news/fd62a11c-018c-4fde-ac8d-f674c6d9d190.trivial.rst
new file mode 100644
index 000000000..e69de29bb
diff --git a/news/resolvelib.vendor.rst b/news/resolvelib.vendor.rst
new file mode 100644
index 000000000..4f102fc0d
--- /dev/null
+++ b/news/resolvelib.vendor.rst
@@ -0,0 +1 @@
+Upgrade vendored resolvelib to 0.5.5.
diff --git a/noxfile.py b/noxfile.py
index 0dd382a24..d9e344543 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -10,9 +10,11 @@ from typing import Iterator, List, Tuple
 
 import nox
 
+# fmt: off
 sys.path.append(".")
-from tools.automation import release  # isort:skip  # noqa
+from tools import release  # isort:skip  # noqa
 sys.path.pop()
+# fmt: on
 
 nox.options.reuse_existing_virtualenvs = True
 nox.options.sessions = ["lint"]
@@ -75,17 +77,16 @@ def test(session):
     # type: (nox.Session) -> None
     # Get the common wheels.
     if should_update_common_wheels():
+        # fmt: off
         run_with_protected_pip(
             session,
             "wheel",
             "-w", LOCATIONS["common-wheels"],
             "-r", REQUIREMENTS["common-wheels"],
         )
+        # fmt: on
     else:
-        msg = (
-            "Re-using existing common-wheels at {}."
-            .format(LOCATIONS["common-wheels"])
-        )
+        msg = f"Re-using existing common-wheels at {LOCATIONS['common-wheels']}."
         session.log(msg)
 
     # Build source distribution
@@ -93,11 +94,14 @@ def test(session):
     sdist_dir = os.path.join(session.virtualenv.location, "sdist")  # type: ignore
     if os.path.exists(sdist_dir):
         shutil.rmtree(sdist_dir, ignore_errors=True)
+
+    # fmt: off
     session.run(
-        "python", "setup.py", "sdist",
-        "--formats=zip", "--dist-dir", sdist_dir,
+        "python", "setup.py", "sdist", "--formats=zip", "--dist-dir", sdist_dir,
         silent=True,
     )
+    # fmt: on
+
     generated_files = os.listdir(sdist_dir)
     assert len(generated_files) == 1
     generated_sdist = os.path.join(sdist_dir, generated_files[0])
@@ -129,6 +133,7 @@ def docs(session):
         # can not use a different configuration directory vs source directory
         # on RTD currently. So, we'll pass "-c docs/html" here.
         # See https://github.com/rtfd/readthedocs.org/issues/1543.
+        # fmt: off
         return [
             "sphinx-build",
             "-W",
@@ -138,11 +143,28 @@ def docs(session):
             "docs/" + kind,
             "docs/build/" + kind,
         ]
+        # fmt: on
 
     session.run(*get_sphinx_build_command("html"))
     session.run(*get_sphinx_build_command("man"))
 
 
+@nox.session(name="docs-live")
+def docs_live(session):
+    # type: (nox.Session) -> None
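+    # Invoked as ``nox -s docs-live``; extra positional arguments are passed
+    # through to sphinx-autobuild via ``session.posargs``.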
+    session.install("-e", ".")
+    session.install("-r", REQUIREMENTS["docs"], "sphinx-autobuild")
+
+    session.run(
+        "sphinx-autobuild",
+        "-d=docs/build/doctrees/livehtml",
+        "-b=dirhtml",
+        "docs/html",
+        "docs/build/livehtml",
+        *session.posargs,
+    )
+
+
 @nox.session
 def lint(session):
     # type: (nox.Session) -> None
@@ -152,7 +174,6 @@ def lint(session):
         args = session.posargs + ["--all-files"]
     else:
         args = ["--all-files", "--show-diff-on-failure"]
-    args.append("--hook-stage=manual")
 
     session.run("pre-commit", "run", *args)
 
@@ -168,11 +189,14 @@ def vendoring(session):
 
     def pinned_requirements(path):
         # type: (Path) -> Iterator[Tuple[str, str]]
-        for line in path.read_text().splitlines():
-            one, two = line.split("==", 1)
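+        # A pinned line looks like "name==version" (optionally followed by a
+        # "#" comment); lines without a "==" separator are skipped.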
+        for line in path.read_text().splitlines(keepends=False):
+            one, sep, two = line.partition("==")
+            if not sep:
+                continue
             name = one.strip()
-            version = two.split("#")[0].strip()
-            yield name, version
+            version = two.split("#", 1)[0].strip()
+            if name and version:
+                yield name, version
 
     vendor_txt = Path("src/pip/_vendor/vendor.txt")
     for name, old_version in pinned_requirements(vendor_txt):
@@ -227,9 +251,7 @@ def prepare_release(session):
     session.log(f"# Updating {AUTHORS_FILE}")
     release.generate_authors(AUTHORS_FILE)
     if release.modified_files_in_git():
-        release.commit_file(
-            session, AUTHORS_FILE, message=f"Update {AUTHORS_FILE}",
-        )
+        release.commit_file(session, AUTHORS_FILE, message=f"Update {AUTHORS_FILE}")
     else:
         session.log(f"# No changes to {AUTHORS_FILE}")
 
@@ -276,7 +298,7 @@ def build_release(session):
 
         tmp_dist_paths = (build_dir / p for p in tmp_dists)
         session.log(f"# Copying dists from {build_dir}")
-        os.makedirs('dist', exist_ok=True)
+        os.makedirs("dist", exist_ok=True)
         for dist, final in zip(tmp_dist_paths, tmp_dists):
             session.log(f"# Copying {dist} to {final}")
             shutil.copy(dist, final)
@@ -291,7 +313,7 @@ def build_dists(session):
 
     has_forbidden_git_untracked_files = any(
         # Don't report the environment this session is running in
-        not untracked_file.startswith('.nox/build-release/')
+        not untracked_file.startswith(".nox/build-release/")
         for untracked_file in release.get_git_untracked_files()
     )
     if has_forbidden_git_untracked_files:
@@ -337,9 +359,7 @@ def upload_release(session):
         f"pip-{version}.tar.gz",
     ]
     if sorted(distfile_names) != sorted(expected_distribution_files):
-        session.error(
-            f"Distribution files do not seem to be for {version} release."
-        )
+        session.error(f"Distribution files do not seem to be for {version} release.")
 
     session.log("# Upload distributions")
     session.run("twine", "upload", *distribution_files)
diff --git a/pyproject.toml b/pyproject.toml
index 073362ceb..9f6dbe12a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -9,7 +9,7 @@ filename = "NEWS.rst"
 directory = "news/"
 title_format = "{version} ({project_date})"
 issue_format = "`#{issue} `_"
-template = "tools/automation/news/template.rst"
+template = "tools/news/template.rst"
 type = [
   { name = "Process",                   directory = "process", showcontent = true },
   { name = "Deprecations and Removals", directory = "removal", showcontent = true },
@@ -26,7 +26,7 @@ requirements = "src/pip/_vendor/vendor.txt"
 namespace = "pip._vendor"
 
 protected-files = ["__init__.py", "README.rst", "vendor.txt"]
-patches-dir = "tools/automation/vendoring/patches"
+patches-dir = "tools/vendoring/patches"
 
 [tool.vendoring.transformations]
 substitute = [
diff --git a/setup.cfg b/setup.cfg
index 1d851d949..cb1ecab65 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -36,10 +36,23 @@ disallow_untyped_defs = True
 disallow_any_generics = True
 warn_unused_ignores = True
 
-[mypy-pip/_vendor/*]
-follow_imports = skip
+[mypy-pip._vendor.*]
 ignore_errors = True
 
+# These vendored libraries use runtime magic to populate things and don't sit
+# well with static typing out of the box. Eventually we should provide correct
+# typing information for their public interface and remove these configs.
+[mypy-pip._vendor.colorama]
+follow_imports = skip
+[mypy-pip._vendor.pkg_resources]
+follow_imports = skip
+[mypy-pip._vendor.progress.*]
+follow_imports = skip
+[mypy-pip._vendor.requests.*]
+follow_imports = skip
+[mypy-pip._vendor.retrying]
+follow_imports = skip
+
 [tool:pytest]
 addopts = --ignore src/pip/_vendor --ignore tests/tests_cache -r aR
 markers =
@@ -53,7 +66,6 @@ markers =
     svn: VCS: Subversion
     mercurial: VCS: Mercurial
     git: VCS: git
-    yaml: yaml based tests
     search: tests for 'pip search'
 
 [coverage:run]
diff --git a/setup.py b/setup.py
index 66820387b..91f537a40 100644
--- a/setup.py
+++ b/setup.py
@@ -1,6 +1,3 @@
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
 import os
 import sys
 
@@ -8,31 +5,32 @@ from setuptools import find_packages, setup
 
 
 def read(rel_path):
+    # type: (str) -> str
     here = os.path.abspath(os.path.dirname(__file__))
     # intentionally *not* adding an encoding option to open, See:
     #   https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690
-    with open(os.path.join(here, rel_path), 'r') as fp:
+    with open(os.path.join(here, rel_path)) as fp:
         return fp.read()
 
 
 def get_version(rel_path):
+    # type: (str) -> str
     for line in read(rel_path).splitlines():
-        if line.startswith('__version__'):
+        if line.startswith("__version__"):
             # __version__ = "0.9"
             delim = '"' if '"' in line else "'"
             return line.split(delim)[1]
     raise RuntimeError("Unable to find version string.")
 
 
-long_description = read('README.rst')
+long_description = read("README.rst")
 
 setup(
     name="pip",
     version=get_version("src/pip/__init__.py"),
     description="The PyPA recommended tool for installing Python packages.",
     long_description=long_description,
-
-    license='MIT',
+    license="MIT",
     classifiers=[
         "Development Status :: 5 - Production/Stable",
         "Intended Audience :: Developers",
@@ -48,17 +46,14 @@ setup(
         "Programming Language :: Python :: Implementation :: CPython",
         "Programming Language :: Python :: Implementation :: PyPy",
     ],
-    url='https://pip.pypa.io/',
-    keywords='distutils easy_install egg setuptools wheel virtualenv',
+    url="https://pip.pypa.io/",
     project_urls={
         "Documentation": "https://pip.pypa.io",
         "Source": "https://github.com/pypa/pip",
         "Changelog": "https://pip.pypa.io/en/stable/news/",
     },
-
-    author='The pip developers',
-    author_email='distutils-sig@python.org',
-
+    author="The pip developers",
+    author_email="distutils-sig@python.org",
     package_dir={"": "src"},
     packages=find_packages(
         where="src",
@@ -76,12 +71,9 @@ setup(
         "console_scripts": [
             "pip=pip._internal.cli.main:main",
             "pip{}=pip._internal.cli.main:main".format(sys.version_info[0]),
-            "pip{}.{}=pip._internal.cli.main:main".format(
-                *sys.version_info[:2]
-            ),
+            "pip{}.{}=pip._internal.cli.main:main".format(*sys.version_info[:2]),
         ],
     },
-
     zip_safe=False,
-    python_requires='>=3.6',
+    python_requires=">=3.6",
 )
diff --git a/src/pip/__main__.py b/src/pip/__main__.py
index 1005489f3..063fd1aac 100644
--- a/src/pip/__main__.py
+++ b/src/pip/__main__.py
@@ -5,12 +5,12 @@ import sys
 # of sys.path, if present to avoid using current directory
 # in pip commands check, freeze, install, list and show,
 # when invoked as python -m pip <command>
-if sys.path[0] in ('', os.getcwd()):
+if sys.path[0] in ("", os.getcwd()):
     sys.path.pop(0)
 
 # If we are running from a wheel, add the wheel to sys.path
 # This allows the usage python pip-*.whl/pip install pip-*.whl
-if __package__ == '':
+if __package__ == "":
     # __file__ is pip-*.whl/pip/__main__.py
     # first dirname call strips of '/__main__.py', second strips off '/pip'
     # Resulting path is the name of the wheel itself
@@ -20,5 +20,5 @@ if __package__ == '':
 
 from pip._internal.cli.main import main as _main
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     sys.exit(_main())
diff --git a/src/pip/_internal/build_env.py b/src/pip/_internal/build_env.py
index b1c877cfd..fa22d6377 100644
--- a/src/pip/_internal/build_env.py
+++ b/src/pip/_internal/build_env.py
@@ -6,7 +6,6 @@ import os
 import sys
 import textwrap
 from collections import OrderedDict
-from distutils.sysconfig import get_python_lib
 from sysconfig import get_paths
 from types import TracebackType
 from typing import TYPE_CHECKING, Iterable, List, Optional, Set, Tuple, Type
@@ -15,6 +14,7 @@ from pip._vendor.pkg_resources import Requirement, VersionConflict, WorkingSet
 
 from pip import __file__ as pip_location
 from pip._internal.cli.spinners import open_spinner
+from pip._internal.locations import get_platlib, get_prefixed_libs, get_purelib
 from pip._internal.utils.subprocess import call_subprocess
 from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
 
@@ -34,14 +34,7 @@ class _Prefix:
             'nt' if os.name == 'nt' else 'posix_prefix',
             vars={'base': path, 'platbase': path}
         )['scripts']
-        # Note: prefer distutils' sysconfig to get the
-        # library paths so PyPy is correctly supported.
-        purelib = get_python_lib(plat_specific=False, prefix=path)
-        platlib = get_python_lib(plat_specific=True, prefix=path)
-        if purelib == platlib:
-            self.lib_dirs = [purelib]
-        else:
-            self.lib_dirs = [purelib, platlib]
+        self.lib_dirs = get_prefixed_libs(path)
 
 
 class BuildEnvironment:
@@ -54,10 +47,10 @@ class BuildEnvironment:
             kind=tempdir_kinds.BUILD_ENV, globally_managed=True
         )
 
-        self._prefixes = OrderedDict((
+        self._prefixes = OrderedDict(
             (name, _Prefix(os.path.join(temp_dir.path, name)))
             for name in ('normal', 'overlay')
-        ))
+        )
 
         self._bin_dirs = []  # type: List[str]
         self._lib_dirs = []  # type: List[str]
@@ -69,10 +62,7 @@ class BuildEnvironment:
         # - ensure .pth files are honored
         # - prevent access to system site packages
         system_sites = {
-            os.path.normcase(site) for site in (
-                get_python_lib(plat_specific=False),
-                get_python_lib(plat_specific=True),
-            )
+            os.path.normcase(site) for site in (get_purelib(), get_platlib())
         }
         self._site_dir = os.path.join(temp_dir.path, 'site')
         if not os.path.exists(self._site_dir):
diff --git a/src/pip/_internal/cli/autocompletion.py b/src/pip/_internal/cli/autocompletion.py
index 3b5aa15de..3b1d2ac9b 100644
--- a/src/pip/_internal/cli/autocompletion.py
+++ b/src/pip/_internal/cli/autocompletion.py
@@ -14,17 +14,16 @@ from pip._internal.utils.misc import get_installed_distributions
 
 def autocomplete():
     # type: () -> None
-    """Entry Point for completion of main and subcommand options.
-    """
+    """Entry Point for completion of main and subcommand options."""
     # Don't complete if user hasn't sourced bash_completion file.
-    if 'PIP_AUTO_COMPLETE' not in os.environ:
+    if "PIP_AUTO_COMPLETE" not in os.environ:
         return
-    cwords = os.environ['COMP_WORDS'].split()[1:]
-    cword = int(os.environ['COMP_CWORD'])
+    cwords = os.environ["COMP_WORDS"].split()[1:]
+    cword = int(os.environ["COMP_CWORD"])
     try:
         current = cwords[cword - 1]
     except IndexError:
-        current = ''
+        current = ""
 
     parser = create_main_parser()
     subcommands = list(commands_dict)
@@ -39,19 +38,20 @@ def autocomplete():
     # subcommand options
     if subcommand_name is not None:
         # special case: 'help' subcommand has no options
-        if subcommand_name == 'help':
+        if subcommand_name == "help":
             sys.exit(1)
         # special case: list locally installed dists for show and uninstall
-        should_list_installed = (
-            subcommand_name in ['show', 'uninstall'] and
-            not current.startswith('-')
-        )
+        should_list_installed = not current.startswith("-") and subcommand_name in [
+            "show",
+            "uninstall",
+        ]
         if should_list_installed:
-            installed = []
             lc = current.lower()
-            for dist in get_installed_distributions(local_only=True):
-                if dist.key.startswith(lc) and dist.key not in cwords[1:]:
-                    installed.append(dist.key)
+            installed = [
+                dist.key
+                for dist in get_installed_distributions(local_only=True)
+                if dist.key.startswith(lc) and dist.key not in cwords[1:]
+            ]
             # if there are no dists installed, fall back to option completion
             if installed:
                 for dist in installed:
@@ -66,13 +66,15 @@ def autocomplete():
                     options.append((opt_str, opt.nargs))
 
         # filter out previously specified options from available options
-        prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]]
+        prev_opts = [x.split("=")[0] for x in cwords[1 : cword - 1]]
         options = [(x, v) for (x, v) in options if x not in prev_opts]
         # filter options by current input
         options = [(k, v) for k, v in options if k.startswith(current)]
         # get completion type given cwords and available subcommand options
         completion_type = get_path_completion_type(
-            cwords, cword, subcommand.parser.option_list_all,
+            cwords,
+            cword,
+            subcommand.parser.option_list_all,
         )
         # get completion files and directories if ``completion_type`` is
         # ``<file>``, ``<path>`` or ``<dir>``
@@ -83,7 +85,7 @@ def autocomplete():
             opt_label = option[0]
             # append '=' to options which require args
             if option[1] and option[0][:2] == "--":
-                opt_label += '='
+                opt_label += "="
             print(opt_label)
     else:
         # show main parser options only when necessary
@@ -91,19 +93,17 @@ def autocomplete():
         opts = [i.option_list for i in parser.option_groups]
         opts.append(parser.option_list)
         flattened_opts = chain.from_iterable(opts)
-        if current.startswith('-'):
+        if current.startswith("-"):
             for opt in flattened_opts:
                 if opt.help != optparse.SUPPRESS_HELP:
                     subcommands += opt._long_opts + opt._short_opts
         else:
             # get completion type given cwords and all available options
-            completion_type = get_path_completion_type(cwords, cword,
-                                                       flattened_opts)
+            completion_type = get_path_completion_type(cwords, cword, flattened_opts)
             if completion_type:
-                subcommands = list(auto_complete_paths(current,
-                                                       completion_type))
+                subcommands = list(auto_complete_paths(current, completion_type))
 
-        print(' '.join([x for x in subcommands if x.startswith(current)]))
+        print(" ".join([x for x in subcommands if x.startswith(current)]))
     sys.exit(1)
 
 
@@ -116,16 +116,16 @@ def get_path_completion_type(cwords, cword, opts):
     :param opts: The available options to check
     :return: path completion type (``file``, ``dir``, ``path`` or None)
     """
-    if cword < 2 or not cwords[cword - 2].startswith('-'):
+    if cword < 2 or not cwords[cword - 2].startswith("-"):
         return None
     for opt in opts:
         if opt.help == optparse.SUPPRESS_HELP:
             continue
-        for o in str(opt).split('/'):
-            if cwords[cword - 2].split('=')[0] == o:
+        for o in str(opt).split("/"):
+            if cwords[cword - 2].split("=")[0] == o:
                 if not opt.metavar or any(
-                        x in ('path', 'file', 'dir')
-                        for x in opt.metavar.split('/')):
+                    x in ("path", "file", "dir") for x in opt.metavar.split("/")
+                ):
                     return opt.metavar
     return None
 
@@ -147,15 +147,16 @@ def auto_complete_paths(current, completion_type):
         return
     filename = os.path.normcase(filename)
     # list all files that start with ``filename``
-    file_list = (x for x in os.listdir(current_path)
-                 if os.path.normcase(x).startswith(filename))
+    file_list = (
+        x for x in os.listdir(current_path) if os.path.normcase(x).startswith(filename)
+    )
     for f in file_list:
         opt = os.path.join(current_path, f)
         comp_file = os.path.normcase(os.path.join(directory, f))
         # complete regular files when there is not ``<dir>`` after option
         # complete directories when there is ``<file>``, ``<path>`` or
         # ``<dir>`` after option
-        if completion_type != 'dir' and os.path.isfile(opt):
+        if completion_type != "dir" and os.path.isfile(opt):
             yield comp_file
         elif os.path.isdir(opt):
-            yield os.path.join(comp_file, '')
+            yield os.path.join(comp_file, "")
diff --git a/src/pip/_internal/cli/base_command.py b/src/pip/_internal/cli/base_command.py
index 87944e495..2dc9845c2 100644
--- a/src/pip/_internal/cli/base_command.py
+++ b/src/pip/_internal/cli/base_command.py
@@ -34,7 +34,7 @@ from pip._internal.utils.temp_dir import TempDirectoryTypeRegistry as TempDirReg
 from pip._internal.utils.temp_dir import global_tempdir_manager, tempdir_registry
 from pip._internal.utils.virtualenv import running_under_virtualenv
 
-__all__ = ['Command']
+__all__ = ["Command"]
 
 logger = logging.getLogger(__name__)
 
@@ -51,7 +51,7 @@ class Command(CommandContextMixIn):
         self.summary = summary
         self.parser = ConfigOptionParser(
             usage=self.usage,
-            prog=f'{get_prog()} {name}',
+            prog=f"{get_prog()} {name}",
             formatter=UpdatingDefaultsHelpFormatter(),
             add_help_option=False,
             name=name,
@@ -62,7 +62,7 @@ class Command(CommandContextMixIn):
         self.tempdir_registry = None  # type: Optional[TempDirRegistry]
 
         # Commands should add options to this option group
-        optgroup_name = f'{self.name.capitalize()} Options'
+        optgroup_name = f"{self.name.capitalize()} Options"
         self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)
 
         # Add the general options
@@ -86,7 +86,7 @@ class Command(CommandContextMixIn):
         """
         # Make sure we do the pip version check if the index_group options
         # are present.
-        assert not hasattr(options, 'no_index')
+        assert not hasattr(options, "no_index")
 
     def run(self, options, args):
         # type: (Values, List[Any]) -> int
@@ -131,17 +131,15 @@ class Command(CommandContextMixIn):
         #       This also affects isolated builds and it should.
 
         if options.no_input:
-            os.environ['PIP_NO_INPUT'] = '1'
+            os.environ["PIP_NO_INPUT"] = "1"
 
         if options.exists_action:
-            os.environ['PIP_EXISTS_ACTION'] = ' '.join(options.exists_action)
+            os.environ["PIP_EXISTS_ACTION"] = " ".join(options.exists_action)
 
         if options.require_venv and not self.ignore_require_venv:
             # If a venv is required check if it can really be found
             if not running_under_virtualenv():
-                logger.critical(
-                    'Could not find an activated virtualenv (required).'
-                )
+                logger.critical("Could not find an activated virtualenv (required).")
                 sys.exit(VIRTUALENV_NOT_FOUND)
 
         if options.cache_dir:
@@ -151,8 +149,8 @@ class Command(CommandContextMixIn):
                     "The directory '%s' or its parent directory is not owned "
                     "or is not writable by the current user. The cache "
                     "has been disabled. Check the permissions and owner of "
-                    "that directory. If executing pip with sudo, you may want "
-                    "sudo's -H flag.",
+                    "that directory. If executing pip with sudo, you should "
+                    "use sudo's -H flag.",
                     options.cache_dir,
                 )
                 options.cache_dir = None
@@ -171,7 +169,7 @@ class Command(CommandContextMixIn):
                 issue=8333,
             )
 
-        if '2020-resolver' in options.features_enabled:
+        if "2020-resolver" in options.features_enabled:
             logger.warning(
                 "--use-feature=2020-resolver no longer has any effect, "
                 "since it is now the default dependency resolver in pip. "
@@ -184,35 +182,39 @@ class Command(CommandContextMixIn):
             return status
         except PreviousBuildDirError as exc:
             logger.critical(str(exc))
-            logger.debug('Exception information:', exc_info=True)
+            logger.debug("Exception information:", exc_info=True)
 
             return PREVIOUS_BUILD_DIR_ERROR
-        except (InstallationError, UninstallationError, BadCommand,
-                NetworkConnectionError) as exc:
+        except (
+            InstallationError,
+            UninstallationError,
+            BadCommand,
+            NetworkConnectionError,
+        ) as exc:
             logger.critical(str(exc))
-            logger.debug('Exception information:', exc_info=True)
+            logger.debug("Exception information:", exc_info=True)
 
             return ERROR
         except CommandError as exc:
-            logger.critical('%s', exc)
-            logger.debug('Exception information:', exc_info=True)
+            logger.critical("%s", exc)
+            logger.debug("Exception information:", exc_info=True)
 
             return ERROR
         except BrokenStdoutLoggingError:
             # Bypass our logger and write any remaining messages to stderr
             # because stdout no longer works.
-            print('ERROR: Pipe to stdout was broken', file=sys.stderr)
+            print("ERROR: Pipe to stdout was broken", file=sys.stderr)
             if level_number <= logging.DEBUG:
                 traceback.print_exc(file=sys.stderr)
 
             return ERROR
         except KeyboardInterrupt:
-            logger.critical('Operation cancelled by user')
-            logger.debug('Exception information:', exc_info=True)
+            logger.critical("Operation cancelled by user")
+            logger.debug("Exception information:", exc_info=True)
 
             return ERROR
         except BaseException:
-            logger.critical('Exception:', exc_info=True)
+            logger.critical("Exception:", exc_info=True)
 
             return UNKNOWN_ERROR
         finally:
diff --git a/src/pip/_internal/cli/cmdoptions.py b/src/pip/_internal/cli/cmdoptions.py
index 0a7e28685..24dc9d141 100644
--- a/src/pip/_internal/cli/cmdoptions.py
+++ b/src/pip/_internal/cli/cmdoptions.py
@@ -41,8 +41,8 @@ def raise_option_error(parser, option, msg):
       option: an Option instance.
       msg: the error text.
     """
-    msg = f'{option} error: {msg}'
-    msg = textwrap.fill(' '.join(msg.split()))
+    msg = f"{option} error: {msg}"
+    msg = textwrap.fill(" ".join(msg.split()))
     parser.error(msg)
 
 
@@ -53,8 +53,8 @@ def make_option_group(group, parser):
     group  -- assumed to be dict with 'name' and 'options' keys
     parser -- an optparse Parser
     """
-    option_group = OptionGroup(parser, group['name'])
-    for option in group['options']:
+    option_group = OptionGroup(parser, group["name"])
+    for option in group["options"]:
         option_group.add_option(option())
     return option_group
 
@@ -73,13 +73,15 @@ def check_install_build_global(options, check_options=None):
     def getname(n):
         # type: (str) -> Optional[Any]
         return getattr(check_options, n, None)
+
     names = ["build_options", "global_options", "install_options"]
     if any(map(getname, names)):
         control = options.format_control
         control.disallow_binaries()
         warnings.warn(
-            'Disabling all use of wheels due to the use of --build-option '
-            '/ --global-option / --install-option.', stacklevel=2,
+            "Disabling all use of wheels due to the use of --build-option "
+            "/ --global-option / --install-option.",
+            stacklevel=2,
         )
 
 
@@ -90,17 +92,18 @@ def check_dist_restriction(options, check_target=False):
     :param options: The OptionParser options.
     :param check_target: Whether or not to check if --target is being used.
     """
-    dist_restriction_set = any([
-        options.python_version,
-        options.platforms,
-        options.abis,
-        options.implementation,
-    ])
+    dist_restriction_set = any(
+        [
+            options.python_version,
+            options.platforms,
+            options.abis,
+            options.implementation,
+        ]
+    )
 
-    binary_only = FormatControl(set(), {':all:'})
+    binary_only = FormatControl(set(), {":all:"})
     sdist_dependencies_allowed = (
-        options.format_control != binary_only and
-        not options.ignore_dependencies
+        options.format_control != binary_only and not options.ignore_dependencies
     )
 
     # Installations or downloads using dist restrictions must not combine
@@ -146,10 +149,11 @@ class PipOption(Option):
 
 help_ = partial(
     Option,
-    '-h', '--help',
-    dest='help',
-    action='help',
-    help='Show help.',
+    "-h",
+    "--help",
+    dest="help",
+    action="help",
+    help="Show help.",
 )  # type: Callable[..., Option]
 
 isolated_mode = partial(
@@ -167,111 +171,119 @@ isolated_mode = partial(
 require_virtualenv = partial(
     Option,
     # Run only if inside a virtualenv, bail if not.
-    '--require-virtualenv', '--require-venv',
-    dest='require_venv',
-    action='store_true',
+    "--require-virtualenv",
+    "--require-venv",
+    dest="require_venv",
+    action="store_true",
     default=False,
-    help=SUPPRESS_HELP
+    help=SUPPRESS_HELP,
 )  # type: Callable[..., Option]
 
 verbose = partial(
     Option,
-    '-v', '--verbose',
-    dest='verbose',
-    action='count',
+    "-v",
+    "--verbose",
+    dest="verbose",
+    action="count",
     default=0,
-    help='Give more output. Option is additive, and can be used up to 3 times.'
+    help="Give more output. Option is additive, and can be used up to 3 times.",
 )  # type: Callable[..., Option]
 
 no_color = partial(
     Option,
-    '--no-color',
-    dest='no_color',
-    action='store_true',
+    "--no-color",
+    dest="no_color",
+    action="store_true",
     default=False,
     help="Suppress colored output.",
 )  # type: Callable[..., Option]
 
 version = partial(
     Option,
-    '-V', '--version',
-    dest='version',
-    action='store_true',
-    help='Show version and exit.',
+    "-V",
+    "--version",
+    dest="version",
+    action="store_true",
+    help="Show version and exit.",
 )  # type: Callable[..., Option]
 
 quiet = partial(
     Option,
-    '-q', '--quiet',
-    dest='quiet',
-    action='count',
+    "-q",
+    "--quiet",
+    dest="quiet",
+    action="count",
     default=0,
     help=(
-        'Give less output. Option is additive, and can be used up to 3'
-        ' times (corresponding to WARNING, ERROR, and CRITICAL logging'
-        ' levels).'
+        "Give less output. Option is additive, and can be used up to 3"
+        " times (corresponding to WARNING, ERROR, and CRITICAL logging"
+        " levels)."
     ),
 )  # type: Callable[..., Option]
 
 progress_bar = partial(
     Option,
-    '--progress-bar',
-    dest='progress_bar',
-    type='choice',
+    "--progress-bar",
+    dest="progress_bar",
+    type="choice",
     choices=list(BAR_TYPES.keys()),
-    default='on',
+    default="on",
     help=(
-        'Specify type of progress to be displayed [' +
-        '|'.join(BAR_TYPES.keys()) + '] (default: %default)'
+        "Specify type of progress to be displayed ["
+        + "|".join(BAR_TYPES.keys())
+        + "] (default: %default)"
     ),
 )  # type: Callable[..., Option]
 
 log = partial(
     PipOption,
-    "--log", "--log-file", "--local-log",
+    "--log",
+    "--log-file",
+    "--local-log",
     dest="log",
     metavar="path",
     type="path",
-    help="Path to a verbose appending log."
+    help="Path to a verbose appending log.",
 )  # type: Callable[..., Option]
 
 no_input = partial(
     Option,
     # Don't ask for input
-    '--no-input',
-    dest='no_input',
-    action='store_true',
+    "--no-input",
+    dest="no_input",
+    action="store_true",
     default=False,
-    help="Disable prompting for input."
+    help="Disable prompting for input.",
 )  # type: Callable[..., Option]
 
 proxy = partial(
     Option,
-    '--proxy',
-    dest='proxy',
-    type='str',
-    default='',
-    help="Specify a proxy in the form [user:passwd@]proxy.server:port."
+    "--proxy",
+    dest="proxy",
+    type="str",
+    default="",
+    help="Specify a proxy in the form [user:passwd@]proxy.server:port.",
 )  # type: Callable[..., Option]
 
 retries = partial(
     Option,
-    '--retries',
-    dest='retries',
-    type='int',
+    "--retries",
+    dest="retries",
+    type="int",
     default=5,
     help="Maximum number of retries each connection should attempt "
-         "(default %default times).",
+    "(default %default times).",
 )  # type: Callable[..., Option]
 
 timeout = partial(
     Option,
-    '--timeout', '--default-timeout',
-    metavar='sec',
-    dest='timeout',
-    type='float',
+    "--timeout",
+    "--default-timeout",
+    metavar="sec",
+    dest="timeout",
+    type="float",
     default=15,
-    help='Set the socket timeout (default %default seconds).',
+    help="Set the socket timeout (default %default seconds).",
 )  # type: Callable[..., Option]
 
 
@@ -279,88 +291,96 @@ def exists_action():
     # type: () -> Option
     return Option(
         # Option when path already exist
-        '--exists-action',
-        dest='exists_action',
-        type='choice',
-        choices=['s', 'i', 'w', 'b', 'a'],
+        "--exists-action",
+        dest="exists_action",
+        type="choice",
+        choices=["s", "i", "w", "b", "a"],
         default=[],
-        action='append',
-        metavar='action',
+        action="append",
+        metavar="action",
         help="Default action when a path already exists: "
-             "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.",
+        "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.",
     )
 
 
 cert = partial(
     PipOption,
-    '--cert',
-    dest='cert',
-    type='path',
-    metavar='path',
-    help="Path to alternate CA bundle.",
+    "--cert",
+    dest="cert",
+    type="path",
+    metavar="path",
+    help=(
+        "Path to PEM-encoded CA certificate bundle. "
+        "If provided, overrides the default. "
+        "See 'SSL Certificate Verification' in pip documentation "
+        "for more information."
+    ),
 )  # type: Callable[..., Option]
 
 client_cert = partial(
     PipOption,
-    '--client-cert',
-    dest='client_cert',
-    type='path',
+    "--client-cert",
+    dest="client_cert",
+    type="path",
     default=None,
-    metavar='path',
+    metavar="path",
     help="Path to SSL client certificate, a single file containing the "
-         "private key and the certificate in PEM format.",
+    "private key and the certificate in PEM format.",
 )  # type: Callable[..., Option]
 
 index_url = partial(
     Option,
-    '-i', '--index-url', '--pypi-url',
-    dest='index_url',
-    metavar='URL',
+    "-i",
+    "--index-url",
+    "--pypi-url",
+    dest="index_url",
+    metavar="URL",
     default=PyPI.simple_url,
     help="Base URL of the Python Package Index (default %default). "
-         "This should point to a repository compliant with PEP 503 "
-         "(the simple repository API) or a local directory laid out "
-         "in the same format.",
+    "This should point to a repository compliant with PEP 503 "
+    "(the simple repository API) or a local directory laid out "
+    "in the same format.",
 )  # type: Callable[..., Option]
 
 
 def extra_index_url():
     # type: () -> Option
     return Option(
-        '--extra-index-url',
-        dest='extra_index_urls',
-        metavar='URL',
-        action='append',
+        "--extra-index-url",
+        dest="extra_index_urls",
+        metavar="URL",
+        action="append",
         default=[],
         help="Extra URLs of package indexes to use in addition to "
-             "--index-url. Should follow the same rules as "
-             "--index-url.",
+        "--index-url. Should follow the same rules as "
+        "--index-url.",
     )
 
 
 no_index = partial(
     Option,
-    '--no-index',
-    dest='no_index',
-    action='store_true',
+    "--no-index",
+    dest="no_index",
+    action="store_true",
     default=False,
-    help='Ignore package index (only looking at --find-links URLs instead).',
+    help="Ignore package index (only looking at --find-links URLs instead).",
 )  # type: Callable[..., Option]
 
 
 def find_links():
     # type: () -> Option
     return Option(
-        '-f', '--find-links',
-        dest='find_links',
-        action='append',
+        "-f",
+        "--find-links",
+        dest="find_links",
+        action="append",
         default=[],
-        metavar='url',
+        metavar="url",
         help="If a URL or path to an html file, then parse for links to "
-             "archives such as sdist (.tar.gz) or wheel (.whl) files. "
-             "If a local path or file:// URL that's a directory,  "
-             "then look for archives in the directory listing. "
-             "Links to VCS project URLs are not supported.",
+        "archives such as sdist (.tar.gz) or wheel (.whl) files. "
+        "If a local path or file:// URL that's a directory,  "
+        "then look for archives in the directory listing. "
+        "Links to VCS project URLs are not supported.",
     )
 
 
@@ -373,46 +393,51 @@ def trusted_host():
         metavar="HOSTNAME",
         default=[],
         help="Mark this host or host:port pair as trusted, even though it "
-             "does not have valid or any HTTPS.",
+        "does not have valid or any HTTPS.",
     )
 
 
 def constraints():
     # type: () -> Option
     return Option(
-        '-c', '--constraint',
-        dest='constraints',
-        action='append',
+        "-c",
+        "--constraint",
+        dest="constraints",
+        action="append",
         default=[],
-        metavar='file',
-        help='Constrain versions using the given constraints file. '
-        'This option can be used multiple times.'
+        metavar="file",
+        help="Constrain versions using the given constraints file. "
+        "This option can be used multiple times.",
     )
 
 
 def requirements():
     # type: () -> Option
     return Option(
-        '-r', '--requirement',
-        dest='requirements',
-        action='append',
+        "-r",
+        "--requirement",
+        dest="requirements",
+        action="append",
         default=[],
-        metavar='file',
-        help='Install from the given requirements file. '
-        'This option can be used multiple times.'
+        metavar="file",
+        help="Install from the given requirements file. "
+        "This option can be used multiple times.",
     )
 
 
 def editable():
     # type: () -> Option
     return Option(
-        '-e', '--editable',
-        dest='editables',
-        action='append',
+        "-e",
+        "--editable",
+        dest="editables",
+        action="append",
         default=[],
-        metavar='path/url',
-        help=('Install a project in editable mode (i.e. setuptools '
-              '"develop mode") from a local project path or a VCS url.'),
+        metavar="path/url",
+        help=(
+            "Install a project in editable mode (i.e. setuptools "
+            '"develop mode") from a local project path or a VCS url.'
+        ),
     )
 
 
@@ -424,16 +449,19 @@ def _handle_src(option, opt_str, value, parser):
 
 src = partial(
     PipOption,
-    '--src', '--source', '--source-dir', '--source-directory',
-    dest='src_dir',
-    type='path',
-    metavar='dir',
+    "--src",
+    "--source",
+    "--source-dir",
+    "--source-directory",
+    dest="src_dir",
+    type="path",
+    metavar="dir",
     default=get_src_prefix(),
-    action='callback',
+    action="callback",
     callback=_handle_src,
-    help='Directory to check out editable projects into. '
+    help="Directory to check out editable projects into. "
     'The default in a virtualenv is "<venv path>/src". '
-    'The default for global installs is "<current dir>/src".'
+    'The default for global installs is "<current dir>/src".',
 )  # type: Callable[..., Option]
 
 
@@ -447,7 +475,9 @@ def _handle_no_binary(option, opt_str, value, parser):
     # type: (Option, str, str, OptionParser) -> None
     existing = _get_format_control(parser.values, option)
     FormatControl.handle_mutual_excludes(
-        value, existing.no_binary, existing.only_binary,
+        value,
+        existing.no_binary,
+        existing.only_binary,
     )
 
 
@@ -455,7 +485,9 @@ def _handle_only_binary(option, opt_str, value, parser):
     # type: (Option, str, str, OptionParser) -> None
     existing = _get_format_control(parser.values, option)
     FormatControl.handle_mutual_excludes(
-        value, existing.only_binary, existing.no_binary,
+        value,
+        existing.only_binary,
+        existing.no_binary,
     )
 
 
@@ -463,15 +495,18 @@ def no_binary():
     # type: () -> Option
     format_control = FormatControl(set(), set())
     return Option(
-        "--no-binary", dest="format_control", action="callback",
-        callback=_handle_no_binary, type="str",
+        "--no-binary",
+        dest="format_control",
+        action="callback",
+        callback=_handle_no_binary,
+        type="str",
         default=format_control,
-        help='Do not use binary packages. Can be supplied multiple times, and '
-             'each time adds to the existing value. Accepts either ":all:" to '
-             'disable all binary packages, ":none:" to empty the set (notice '
-             'the colons), or one or more package names with commas between '
-             'them (no colons). Note that some packages are tricky to compile '
-             'and may fail to install when this option is used on them.',
+        help="Do not use binary packages. Can be supplied multiple times, and "
+        'each time adds to the existing value. Accepts either ":all:" to '
+        'disable all binary packages, ":none:" to empty the set (notice '
+        "the colons), or one or more package names with commas between "
+        "them (no colons). Note that some packages are tricky to compile "
+        "and may fail to install when this option is used on them.",
     )
 
 
@@ -479,28 +514,33 @@ def only_binary():
     # type: () -> Option
     format_control = FormatControl(set(), set())
     return Option(
-        "--only-binary", dest="format_control", action="callback",
-        callback=_handle_only_binary, type="str",
+        "--only-binary",
+        dest="format_control",
+        action="callback",
+        callback=_handle_only_binary,
+        type="str",
         default=format_control,
-        help='Do not use source packages. Can be supplied multiple times, and '
-             'each time adds to the existing value. Accepts either ":all:" to '
-             'disable all source packages, ":none:" to empty the set, or one '
-             'or more package names with commas between them. Packages '
-             'without binary distributions will fail to install when this '
-             'option is used on them.',
+        help="Do not use source packages. Can be supplied multiple times, and "
+        'each time adds to the existing value. Accepts either ":all:" to '
+        'disable all source packages, ":none:" to empty the set, or one '
+        "or more package names with commas between them. Packages "
+        "without binary distributions will fail to install when this "
+        "option is used on them.",
     )
 
 
 platforms = partial(
     Option,
-    '--platform',
-    dest='platforms',
-    metavar='platform',
-    action='append',
+    "--platform",
+    dest="platforms",
+    metavar="platform",
+    action="append",
     default=None,
-    help=("Only use wheels compatible with . Defaults to the "
-          "platform of the running system. Use this option multiple times to "
-          "specify multiple platforms supported by the target interpreter."),
+    help=(
+        "Only use wheels compatible with . Defaults to the "
+        "platform of the running system. Use this option multiple times to "
+        "specify multiple platforms supported by the target interpreter."
+    ),
 )  # type: Callable[..., Option]
 
 
@@ -517,9 +557,9 @@ def _convert_python_version(value):
         # The empty string is the same as not providing a value.
         return (None, None)
 
-    parts = value.split('.')
+    parts = value.split(".")
     if len(parts) > 3:
-        return ((), 'at most three version parts are allowed')
+        return ((), "at most three version parts are allowed")
 
     if len(parts) == 1:
         # Then we are in the case of "3" or "37".
@@ -530,7 +570,7 @@ def _convert_python_version(value):
     try:
         version_info = tuple(int(part) for part in parts)
     except ValueError:
-        return ((), 'each version part must be an integer')
+        return ((), "each version part must be an integer")
 
     return (version_info, None)
 
@@ -542,10 +582,9 @@ def _handle_python_version(option, opt_str, value, parser):
     """
     version_info, error_msg = _convert_python_version(value)
     if error_msg is not None:
-        msg = (
-            'invalid --python-version value: {!r}: {}'.format(
-                value, error_msg,
-            )
+        msg = "invalid --python-version value: {!r}: {}".format(
+            value,
+            error_msg,
         )
         raise_option_error(parser, option=option, msg=msg)
 
@@ -554,49 +593,56 @@ def _handle_python_version(option, opt_str, value, parser):
 
 python_version = partial(
     Option,
-    '--python-version',
-    dest='python_version',
-    metavar='python_version',
-    action='callback',
-    callback=_handle_python_version, type='str',
+    "--python-version",
+    dest="python_version",
+    metavar="python_version",
+    action="callback",
+    callback=_handle_python_version,
+    type="str",
     default=None,
-    help=dedent("""\
+    help=dedent(
+        """\
     The Python interpreter version to use for wheel and "Requires-Python"
     compatibility checks. Defaults to a version derived from the running
     interpreter. The version can be specified using up to three dot-separated
     integers (e.g. "3" for 3.0.0, "3.7" for 3.7.0, or "3.7.3"). A major-minor
     version can also be given as a string without dots (e.g. "37" for 3.7.0).
-    """),
+    """
+    ),
 )  # type: Callable[..., Option]
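
As an aside on the --python-version forms documented in the help text above: the accepted shapes can be shown with a simplified, standalone re-implementation of the documented parsing rules. It is illustrative only (the name parse_python_version is an assumption, not pip's internal helper), but it makes the accepted inputs and error cases concrete.

def parse_python_version(value):
    # Simplified sketch of the documented --python-version rules; not pip's
    # actual implementation, only a mirror of the help text above.
    if not value:
        return (None, None)  # empty string: same as not providing a value
    parts = value.split(".")
    if len(parts) > 3:
        return ((), "at most three version parts are allowed")
    if len(parts) == 1 and len(parts[0]) > 1:
        # "37" is shorthand for a major-minor version, i.e. 3.7
        parts = [parts[0][0], parts[0][1:]]
    try:
        version_info = tuple(int(part) for part in parts)
    except ValueError:
        return ((), "each version part must be an integer")
    return (version_info, None)

# parse_python_version("3")     -> ((3,), None)
# parse_python_version("37")    -> ((3, 7), None)
# parse_python_version("3.7.3") -> ((3, 7, 3), None)
# parse_python_version("3.x")   -> ((), "each version part must be an integer")
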
 
 
 implementation = partial(
     Option,
-    '--implementation',
-    dest='implementation',
-    metavar='implementation',
+    "--implementation",
+    dest="implementation",
+    metavar="implementation",
     default=None,
-    help=("Only use wheels compatible with Python "
-          "implementation , e.g. 'pp', 'jy', 'cp', "
-          " or 'ip'. If not specified, then the current "
-          "interpreter implementation is used.  Use 'py' to force "
-          "implementation-agnostic wheels."),
+    help=(
+        "Only use wheels compatible with Python "
+        "implementation , e.g. 'pp', 'jy', 'cp', "
+        " or 'ip'. If not specified, then the current "
+        "interpreter implementation is used.  Use 'py' to force "
+        "implementation-agnostic wheels."
+    ),
 )  # type: Callable[..., Option]
 
 
 abis = partial(
     Option,
-    '--abi',
-    dest='abis',
-    metavar='abi',
-    action='append',
+    "--abi",
+    dest="abis",
+    metavar="abi",
+    action="append",
     default=None,
-    help=("Only use wheels compatible with Python abi , e.g. 'pypy_41'. "
-          "If not specified, then the current interpreter abi tag is used. "
-          "Use this option multiple times to specify multiple abis supported "
-          "by the target interpreter. Generally you will need to specify "
-          "--implementation, --platform, and --python-version when using this "
-          "option."),
+    help=(
+        "Only use wheels compatible with Python abi , e.g. 'pypy_41'. "
+        "If not specified, then the current interpreter abi tag is used. "
+        "Use this option multiple times to specify multiple abis supported "
+        "by the target interpreter. Generally you will need to specify "
+        "--implementation, --platform, and --python-version when using this "
+        "option."
+    ),
 )  # type: Callable[..., Option]
 
 
@@ -627,7 +673,7 @@ def prefer_binary():
         dest="prefer_binary",
         action="store_true",
         default=False,
-        help="Prefer older binary packages over newer source packages."
+        help="Prefer older binary packages over newer source packages.",
     )
 
 
@@ -637,8 +683,8 @@ cache_dir = partial(
     dest="cache_dir",
     default=USER_CACHE_DIR,
     metavar="dir",
-    type='path',
-    help="Store the cache data in ."
+    type="path",
+    help="Store the cache data in .",
 )  # type: Callable[..., Option]
 
 
@@ -681,39 +727,43 @@ no_cache = partial(
 
 no_deps = partial(
     Option,
-    '--no-deps', '--no-dependencies',
-    dest='ignore_dependencies',
-    action='store_true',
+    "--no-deps",
+    "--no-dependencies",
+    dest="ignore_dependencies",
+    action="store_true",
     default=False,
     help="Don't install package dependencies.",
 )  # type: Callable[..., Option]
 
 build_dir = partial(
     PipOption,
-    '-b', '--build', '--build-dir', '--build-directory',
-    dest='build_dir',
-    type='path',
-    metavar='dir',
+    "-b",
+    "--build",
+    "--build-dir",
+    "--build-directory",
+    dest="build_dir",
+    type="path",
+    metavar="dir",
     help=SUPPRESS_HELP,
 )  # type: Callable[..., Option]
 
 ignore_requires_python = partial(
     Option,
-    '--ignore-requires-python',
-    dest='ignore_requires_python',
-    action='store_true',
-    help='Ignore the Requires-Python information.'
+    "--ignore-requires-python",
+    dest="ignore_requires_python",
+    action="store_true",
+    help="Ignore the Requires-Python information.",
 )  # type: Callable[..., Option]
 
 no_build_isolation = partial(
     Option,
-    '--no-build-isolation',
-    dest='build_isolation',
-    action='store_false',
+    "--no-build-isolation",
+    dest="build_isolation",
+    action="store_false",
     default=True,
-    help='Disable isolation when building a modern source distribution. '
-         'Build dependencies specified by PEP 518 must be already installed '
-         'if this option is used.'
+    help="Disable isolation when building a modern source distribution. "
+    "Build dependencies specified by PEP 518 must be already installed "
+    "if this option is used.",
 )  # type: Callable[..., Option]
 
 
@@ -743,62 +793,62 @@ def _handle_no_use_pep517(option, opt, value, parser):
 
 use_pep517 = partial(
     Option,
-    '--use-pep517',
-    dest='use_pep517',
-    action='store_true',
+    "--use-pep517",
+    dest="use_pep517",
+    action="store_true",
     default=None,
-    help='Use PEP 517 for building source distributions '
-         '(use --no-use-pep517 to force legacy behaviour).'
+    help="Use PEP 517 for building source distributions "
+    "(use --no-use-pep517 to force legacy behaviour).",
 )  # type: Any
 
 no_use_pep517 = partial(
     Option,
-    '--no-use-pep517',
-    dest='use_pep517',
-    action='callback',
+    "--no-use-pep517",
+    dest="use_pep517",
+    action="callback",
     callback=_handle_no_use_pep517,
     default=None,
-    help=SUPPRESS_HELP
+    help=SUPPRESS_HELP,
 )  # type: Any
 
 install_options = partial(
     Option,
-    '--install-option',
-    dest='install_options',
-    action='append',
-    metavar='options',
+    "--install-option",
+    dest="install_options",
+    action="append",
+    metavar="options",
     help="Extra arguments to be supplied to the setup.py install "
-         "command (use like --install-option=\"--install-scripts=/usr/local/"
-         "bin\"). Use multiple --install-option options to pass multiple "
-         "options to setup.py install. If you are using an option with a "
-         "directory path, be sure to use absolute path.",
+    'command (use like --install-option="--install-scripts=/usr/local/'
+    'bin"). Use multiple --install-option options to pass multiple '
+    "options to setup.py install. If you are using an option with a "
+    "directory path, be sure to use absolute path.",
 )  # type: Callable[..., Option]
 
 global_options = partial(
     Option,
-    '--global-option',
-    dest='global_options',
-    action='append',
-    metavar='options',
+    "--global-option",
+    dest="global_options",
+    action="append",
+    metavar="options",
     help="Extra global options to be supplied to the setup.py "
-         "call before the install command.",
+    "call before the install command.",
 )  # type: Callable[..., Option]
 
 no_clean = partial(
     Option,
-    '--no-clean',
-    action='store_true',
+    "--no-clean",
+    action="store_true",
     default=False,
-    help="Don't clean up build directories."
+    help="Don't clean up build directories.",
 )  # type: Callable[..., Option]
 
 pre = partial(
     Option,
-    '--pre',
-    action='store_true',
+    "--pre",
+    action="store_true",
     default=False,
     help="Include pre-release and development versions. By default, "
-         "pip only finds stable versions.",
+    "pip only finds stable versions.",
 )  # type: Callable[..., Option]
 
 disable_pip_version_check = partial(
@@ -808,7 +858,7 @@ disable_pip_version_check = partial(
     action="store_true",
     default=False,
     help="Don't periodically check PyPI to determine whether a new version "
-         "of pip is available for download. Implied with --no-index.",
+    "of pip is available for download. Implied with --no-index.",
 )  # type: Callable[..., Option]
 
 
@@ -819,105 +869,106 @@ def _handle_merge_hash(option, opt_str, value, parser):
     if not parser.values.hashes:
         parser.values.hashes = {}
     try:
-        algo, digest = value.split(':', 1)
+        algo, digest = value.split(":", 1)
     except ValueError:
-        parser.error('Arguments to {} must be a hash name '  # noqa
-                     'followed by a value, like --hash=sha256:'
-                     'abcde...'.format(opt_str))
+        parser.error(
+            "Arguments to {} must be a hash name "  # noqa
+            "followed by a value, like --hash=sha256:"
+            "abcde...".format(opt_str)
+        )
     if algo not in STRONG_HASHES:
-        parser.error('Allowed hash algorithms for {} are {}.'.format(  # noqa
-                     opt_str, ', '.join(STRONG_HASHES)))
+        parser.error(
+            "Allowed hash algorithms for {} are {}.".format(  # noqa
+                opt_str, ", ".join(STRONG_HASHES)
+            )
+        )
     parser.values.hashes.setdefault(algo, []).append(digest)
 
 
 hash = partial(
     Option,
-    '--hash',
+    "--hash",
     # Hash values eventually end up in InstallRequirement.hashes due to
     # __dict__ copying in process_line().
-    dest='hashes',
-    action='callback',
+    dest="hashes",
+    action="callback",
     callback=_handle_merge_hash,
-    type='string',
+    type="string",
     help="Verify that the package's archive matches this "
-         'hash before installing. Example: --hash=sha256:abcdef...',
+    "hash before installing. Example: --hash=sha256:abcdef...",
 )  # type: Callable[..., Option]
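
To make the accumulation above concrete: each --hash value is split once on ":" and its digest is appended under the algorithm, so repeated options build up a dict of digests per algorithm. A standalone sketch (the STRONG_HASHES validation and error paths are omitted):

hashes = {}
for value in ["sha256:abc123", "sha256:def456", "sha512:0f0f0f"]:
    algo, digest = value.split(":", 1)          # same split as _handle_merge_hash
    hashes.setdefault(algo, []).append(digest)

# hashes == {"sha256": ["abc123", "def456"], "sha512": ["0f0f0f"]}
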
 
 
 require_hashes = partial(
     Option,
-    '--require-hashes',
-    dest='require_hashes',
-    action='store_true',
+    "--require-hashes",
+    dest="require_hashes",
+    action="store_true",
     default=False,
-    help='Require a hash to check each requirement against, for '
-         'repeatable installs. This option is implied when any package in a '
-         'requirements file has a --hash option.',
+    help="Require a hash to check each requirement against, for "
+    "repeatable installs. This option is implied when any package in a "
+    "requirements file has a --hash option.",
 )  # type: Callable[..., Option]
 
 
 list_path = partial(
     PipOption,
-    '--path',
-    dest='path',
-    type='path',
-    action='append',
-    help='Restrict to the specified installation path for listing '
-         'packages (can be used multiple times).'
+    "--path",
+    dest="path",
+    type="path",
+    action="append",
+    help="Restrict to the specified installation path for listing "
+    "packages (can be used multiple times).",
 )  # type: Callable[..., Option]
 
 
 def check_list_path_option(options):
     # type: (Values) -> None
     if options.path and (options.user or options.local):
-        raise CommandError(
-            "Cannot combine '--path' with '--user' or '--local'"
-        )
+        raise CommandError("Cannot combine '--path' with '--user' or '--local'")
 
 
 list_exclude = partial(
     PipOption,
-    '--exclude',
-    dest='excludes',
-    action='append',
-    metavar='package',
-    type='package_name',
+    "--exclude",
+    dest="excludes",
+    action="append",
+    metavar="package",
+    type="package_name",
     help="Exclude specified package from the output",
 )  # type: Callable[..., Option]
 
 
 no_python_version_warning = partial(
     Option,
-    '--no-python-version-warning',
-    dest='no_python_version_warning',
-    action='store_true',
+    "--no-python-version-warning",
+    dest="no_python_version_warning",
+    action="store_true",
     default=False,
-    help='Silence deprecation warnings for upcoming unsupported Pythons.',
+    help="Silence deprecation warnings for upcoming unsupported Pythons.",
 )  # type: Callable[..., Option]
 
 
 use_new_feature = partial(
     Option,
-    '--use-feature',
-    dest='features_enabled',
-    metavar='feature',
-    action='append',
+    "--use-feature",
+    dest="features_enabled",
+    metavar="feature",
+    action="append",
     default=[],
-    choices=['2020-resolver', 'fast-deps'],
-    help='Enable new functionality, that may be backward incompatible.',
+    choices=["2020-resolver", "fast-deps", "in-tree-build"],
+    help="Enable new functionality, that may be backward incompatible.",
 )  # type: Callable[..., Option]
 
 use_deprecated_feature = partial(
     Option,
-    '--use-deprecated',
-    dest='deprecated_features_enabled',
-    metavar='feature',
-    action='append',
+    "--use-deprecated",
+    dest="deprecated_features_enabled",
+    metavar="feature",
+    action="append",
     default=[],
-    choices=['legacy-resolver'],
-    help=(
-        'Enable deprecated functionality, that will be removed in the future.'
-    ),
+    choices=["legacy-resolver"],
+    help=("Enable deprecated functionality, that will be removed in the future."),
 )  # type: Callable[..., Option]
 
 
@@ -926,8 +977,8 @@ use_deprecated_feature = partial(
 ##########
 
 general_group = {
-    'name': 'General Options',
-    'options': [
+    "name": "General Options",
+    "options": [
         help_,
         isolated_mode,
         require_virtualenv,
@@ -950,15 +1001,15 @@ general_group = {
         no_python_version_warning,
         use_new_feature,
         use_deprecated_feature,
-    ]
+    ],
 }  # type: Dict[str, Any]
 
 index_group = {
-    'name': 'Package Index Options',
-    'options': [
+    "name": "Package Index Options",
+    "options": [
         index_url,
         extra_index_url,
         no_index,
         find_links,
-    ]
+    ],
 }  # type: Dict[str, Any]
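
general_group and index_group are plain dicts pairing a heading with a list of option factories (partials and zero-argument functions, each returning an Option). A minimal sketch of how such a group could be attached to an optparse parser follows; the helper name add_option_group_from_dict is an assumption for illustration, not pip's actual API, which is not shown in this diff.

import optparse

def add_option_group_from_dict(parser, group_dict):
    # group_dict has the shape of general_group / index_group above:
    # {"name": heading string, "options": [callables returning optparse.Option]}
    group = optparse.OptionGroup(parser, group_dict["name"])
    for make_option in group_dict["options"]:
        group.add_option(make_option())
    parser.add_option_group(group)
    return group
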
diff --git a/src/pip/_internal/cli/command_context.py b/src/pip/_internal/cli/command_context.py
index 0f7c6afc4..375a2e366 100644
--- a/src/pip/_internal/cli/command_context.py
+++ b/src/pip/_internal/cli/command_context.py
@@ -1,7 +1,7 @@
 from contextlib import ExitStack, contextmanager
 from typing import ContextManager, Iterator, TypeVar
 
-_T = TypeVar('_T', covariant=True)
+_T = TypeVar("_T", covariant=True)
 
 
 class CommandContextMixIn:
diff --git a/src/pip/_internal/cli/main.py b/src/pip/_internal/cli/main.py
index 6f107a2e7..7ae074b59 100644
--- a/src/pip/_internal/cli/main.py
+++ b/src/pip/_internal/cli/main.py
@@ -41,6 +41,7 @@ logger = logging.getLogger(__name__)
 # call to main. As it is not safe to do any processing after calling
 # main, this should not be an issue in practice.
 
+
 def main(args=None):
     # type: (Optional[List[str]]) -> int
     if args is None:
@@ -61,7 +62,7 @@ def main(args=None):
     # Needed for locale.getpreferredencoding(False) to work
     # in pip._internal.utils.encoding.auto_decode
     try:
-        locale.setlocale(locale.LC_ALL, '')
+        locale.setlocale(locale.LC_ALL, "")
     except locale.Error as e:
         # setlocale can apparently crash if locale are uninitialized
         logger.debug("Ignoring error %s when setting locale", e)
diff --git a/src/pip/_internal/cli/main_parser.py b/src/pip/_internal/cli/main_parser.py
index 979269755..d0f58fe42 100644
--- a/src/pip/_internal/cli/main_parser.py
+++ b/src/pip/_internal/cli/main_parser.py
@@ -16,14 +16,13 @@ __all__ = ["create_main_parser", "parse_command"]
 
 def create_main_parser():
     # type: () -> ConfigOptionParser
-    """Creates and returns the main parser for pip's CLI
-    """
+    """Creates and returns the main parser for pip's CLI"""
 
     parser = ConfigOptionParser(
-        usage='\n%prog  [options]',
+        usage="\n%prog  [options]",
         add_help_option=False,
         formatter=UpdatingDefaultsHelpFormatter(),
-        name='global',
+        name="global",
         prog=get_prog(),
     )
     parser.disable_interspersed_args()
@@ -38,11 +37,11 @@ def create_main_parser():
     parser.main = True  # type: ignore
 
     # create command listing for description
-    description = [''] + [
-        '{name:27} {command_info.summary}'.format(**locals())
+    description = [""] + [
+        f"{name:27} {command_info.summary}"
         for name, command_info in commands_dict.items()
     ]
-    parser.description = '\n'.join(description)
+    parser.description = "\n".join(description)
 
     return parser
 
@@ -67,7 +66,7 @@ def parse_command(args):
         sys.exit()
 
     # pip || pip help -> print_help()
-    if not args_else or (args_else[0] == 'help' and len(args_else) == 1):
+    if not args_else or (args_else[0] == "help" and len(args_else) == 1):
         parser.print_help()
         sys.exit()
 
@@ -81,7 +80,7 @@ def parse_command(args):
         if guess:
             msg.append(f'maybe you meant "{guess}"')
 
-        raise CommandError(' - '.join(msg))
+        raise CommandError(" - ".join(msg))
 
     # all the args without the subcommand
     cmd_args = args[:]
diff --git a/src/pip/_internal/cli/parser.py b/src/pip/_internal/cli/parser.py
index d3958727b..16523c5a1 100644
--- a/src/pip/_internal/cli/parser.py
+++ b/src/pip/_internal/cli/parser.py
@@ -1,15 +1,12 @@
 """Base option parser setup"""
 
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
 import logging
 import optparse
 import shutil
 import sys
 import textwrap
 from contextlib import suppress
-from typing import Any
+from typing import Any, Dict, Iterator, List, Tuple
 
 from pip._internal.cli.status_codes import UNKNOWN_ERROR
 from pip._internal.configuration import Configuration, ConfigurationError
@@ -22,16 +19,19 @@ class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
     """A prettier/less verbose help formatter for optparse."""
 
     def __init__(self, *args, **kwargs):
+        # type: (*Any, **Any) -> None
         # help position must be aligned with __init__.parseopts.description
-        kwargs['max_help_position'] = 30
-        kwargs['indent_increment'] = 1
-        kwargs['width'] = shutil.get_terminal_size()[0] - 2
+        kwargs["max_help_position"] = 30
+        kwargs["indent_increment"] = 1
+        kwargs["width"] = shutil.get_terminal_size()[0] - 2
         super().__init__(*args, **kwargs)
 
     def format_option_strings(self, option):
+        # type: (optparse.Option) -> str
         return self._format_option_strings(option)
 
-    def _format_option_strings(self, option, mvarfmt=' <{}>', optsep=', '):
+    def _format_option_strings(self, option, mvarfmt=" <{}>", optsep=", "):
+        # type: (optparse.Option, str, str) -> str
         """
         Return a comma-separated list of option strings and metavars.
 
@@ -49,52 +49,57 @@ class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
             opts.insert(1, optsep)
 
         if option.takes_value():
+            assert option.dest is not None
             metavar = option.metavar or option.dest.lower()
             opts.append(mvarfmt.format(metavar.lower()))
 
-        return ''.join(opts)
+        return "".join(opts)
 
     def format_heading(self, heading):
-        if heading == 'Options':
-            return ''
-        return heading + ':\n'
+        # type: (str) -> str
+        if heading == "Options":
+            return ""
+        return heading + ":\n"
 
     def format_usage(self, usage):
+        # type: (str) -> str
         """
         Ensure there is only one newline between usage and the first heading
         if there is no description.
         """
-        msg = '\nUsage: {}\n'.format(
-            self.indent_lines(textwrap.dedent(usage), "  "))
+        msg = "\nUsage: {}\n".format(self.indent_lines(textwrap.dedent(usage), "  "))
         return msg
 
     def format_description(self, description):
+        # type: (str) -> str
         # leave full control over description to us
         if description:
-            if hasattr(self.parser, 'main'):
-                label = 'Commands'
+            if hasattr(self.parser, "main"):
+                label = "Commands"
             else:
-                label = 'Description'
+                label = "Description"
             # some doc strings have initial newlines, some don't
-            description = description.lstrip('\n')
+            description = description.lstrip("\n")
             # some doc strings have final newlines and spaces, some don't
             description = description.rstrip()
             # dedent, then reindent
             description = self.indent_lines(textwrap.dedent(description), "  ")
-            description = f'{label}:\n{description}\n'
+            description = f"{label}:\n{description}\n"
             return description
         else:
-            return ''
+            return ""
 
     def format_epilog(self, epilog):
+        # type: (str) -> str
         # leave full control over epilog to us
         if epilog:
             return epilog
         else:
-            return ''
+            return ""
 
     def indent_lines(self, text, indent):
-        new_lines = [indent + line for line in text.split('\n')]
+        # type: (str, str) -> str
+        new_lines = [indent + line for line in text.split("\n")]
         return "\n".join(new_lines)
 
 
@@ -108,13 +113,16 @@ class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
     """
 
     def expand_default(self, option):
+        # type: (optparse.Option) -> str
         default_values = None
         if self.parser is not None:
+            assert isinstance(self.parser, ConfigOptionParser)
             self.parser._update_defaults(self.parser.defaults)
+            assert option.dest is not None
             default_values = self.parser.defaults.get(option.dest)
         help_text = super().expand_default(option)
 
-        if default_values and option.metavar == 'URL':
+        if default_values and option.metavar == "URL":
             if isinstance(default_values, str):
                 default_values = [default_values]
 
@@ -123,15 +131,14 @@ class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
                 default_values = []
 
             for val in default_values:
-                help_text = help_text.replace(
-                    val, redact_auth_from_url(val))
+                help_text = help_text.replace(val, redact_auth_from_url(val))
 
         return help_text
 
 
 class CustomOptionParser(optparse.OptionParser):
-
     def insert_option_group(self, idx, *args, **kwargs):
+        # type: (int, Any, Any) -> optparse.OptionGroup
         """Insert an OptionGroup at a given position."""
         group = self.add_option_group(*args, **kwargs)
 
@@ -142,6 +149,7 @@ class CustomOptionParser(optparse.OptionParser):
 
     @property
     def option_list_all(self):
+        # type: () -> List[optparse.Option]
         """Get a list of all options, including those in option groups."""
         res = self.option_list[:]
         for i in self.option_groups:
@@ -169,6 +177,7 @@ class ConfigOptionParser(CustomOptionParser):
         super().__init__(*args, **kwargs)
 
     def check_default(self, option, key, val):
+        # type: (optparse.Option, str, Any) -> Any
         try:
             return option.check_value(key, val)
         except optparse.OptionValueError as exc:
@@ -176,17 +185,20 @@ class ConfigOptionParser(CustomOptionParser):
             sys.exit(3)
 
     def _get_ordered_configuration_items(self):
+        # type: () -> Iterator[Tuple[str, Any]]
         # Configuration gives keys in an unordered manner. Order them.
         override_order = ["global", self.name, ":env:"]
 
         # Pool the options into different groups
-        section_items = {name: [] for name in override_order}
+        section_items = {
+            name: [] for name in override_order
+        }  # type: Dict[str, List[Tuple[str, Any]]]
         for section_key, val in self.config.items():
             # ignore empty values
             if not val:
                 logger.debug(
                     "Ignoring configuration key '%s' as it's value is empty.",
-                    section_key
+                    section_key,
                 )
                 continue
 
@@ -200,6 +212,7 @@ class ConfigOptionParser(CustomOptionParser):
                 yield key, val
 
     def _update_defaults(self, defaults):
+        # type: (Dict[str, Any]) -> Dict[str, Any]
         """Updates the given defaults with values from the config files and
         the environ. Does a little special handling for certain types of
         options (lists)."""
@@ -210,7 +223,7 @@ class ConfigOptionParser(CustomOptionParser):
         # Then set the options with those values
         for key, val in self._get_ordered_configuration_items():
             # '--' because configuration supports only long names
-            option = self.get_option('--' + key)
+            option = self.get_option("--" + key)
 
             # Ignore options not present in this parser. E.g. non-globals put
             # in [global] by users that want them to apply to all applicable
@@ -218,31 +231,34 @@ class ConfigOptionParser(CustomOptionParser):
             if option is None:
                 continue
 
-            if option.action in ('store_true', 'store_false'):
+            assert option.dest is not None
+
+            if option.action in ("store_true", "store_false"):
                 try:
                     val = strtobool(val)
                 except ValueError:
                     self.error(
-                        '{} is not a valid value for {} option, '  # noqa
-                        'please specify a boolean value like yes/no, '
-                        'true/false or 1/0 instead.'.format(val, key)
+                        "{} is not a valid value for {} option, "  # noqa
+                        "please specify a boolean value like yes/no, "
+                        "true/false or 1/0 instead.".format(val, key)
                     )
-            elif option.action == 'count':
+            elif option.action == "count":
                 with suppress(ValueError):
                     val = strtobool(val)
                 with suppress(ValueError):
                     val = int(val)
                 if not isinstance(val, int) or val < 0:
                     self.error(
-                        '{} is not a valid value for {} option, '  # noqa
-                        'please instead specify either a non-negative integer '
-                        'or a boolean value like yes/no or false/true '
-                        'which is equivalent to 1/0.'.format(val, key)
+                        "{} is not a valid value for {} option, "  # noqa
+                        "please instead specify either a non-negative integer "
+                        "or a boolean value like yes/no or false/true "
+                        "which is equivalent to 1/0.".format(val, key)
                     )
-            elif option.action == 'append':
+            elif option.action == "append":
                 val = val.split()
                 val = [self.check_default(option, key, v) for v in val]
-            elif option.action == 'callback':
+            elif option.action == "callback":
+                assert option.callback is not None
                 late_eval.add(option.dest)
                 opt_str = option.get_opt_string()
                 val = option.convert_value(opt_str, val)
@@ -261,6 +277,7 @@ class ConfigOptionParser(CustomOptionParser):
         return defaults
 
     def get_default_values(self):
+        # type: () -> optparse.Values
         """Overriding to make updating the defaults after instantiation of
         the option parser possible, _update_defaults() does the dirty work."""
         if not self.process_default_values:
@@ -275,6 +292,7 @@ class ConfigOptionParser(CustomOptionParser):
 
         defaults = self._update_defaults(self.defaults.copy())  # ours
         for option in self._get_all_options():
+            assert option.dest is not None
             default = defaults.get(option.dest)
             if isinstance(default, str):
                 opt_str = option.get_opt_string()
@@ -282,5 +300,6 @@ class ConfigOptionParser(CustomOptionParser):
         return optparse.Values(defaults)
 
     def error(self, msg):
+        # type: (str) -> None
         self.print_usage(sys.stderr)
         self.exit(UNKNOWN_ERROR, f"{msg}\n")
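
In isolation, the coercion that _update_defaults applies to raw config/environment strings boils down to three cases: boolean flags go through strtobool, counters accept booleans or non-negative integers, and append options are whitespace-split. A simplified standalone sketch of that decision table (without the parser.error paths; distutils' strtobool is assumed equivalent to the helper pip uses):

from distutils.util import strtobool

def coerce_config_value(action, raw):
    # Simplified mirror of the branches in ConfigOptionParser._update_defaults.
    if action in ("store_true", "store_false"):
        return bool(strtobool(raw))      # accepts yes/no, true/false, 1/0
    if action == "count":
        try:
            return strtobool(raw)        # booleans count as 0 or 1
        except ValueError:
            value = int(raw)             # otherwise a non-negative integer
            if value < 0:
                raise ValueError("count values must be non-negative")
            return value
    if action == "append":
        return raw.split()               # whitespace-separated list
    return raw                           # callbacks and plain options handled elsewhere
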
diff --git a/src/pip/_internal/cli/progress_bars.py b/src/pip/_internal/cli/progress_bars.py
index 50cb74f5b..3064c8569 100644
--- a/src/pip/_internal/cli/progress_bars.py
+++ b/src/pip/_internal/cli/progress_bars.py
@@ -108,7 +108,6 @@ class InterruptibleMixin:
 
 
 class SilentBar(Bar):
-
     def update(self):
         # type: () -> None
         pass
@@ -123,14 +122,11 @@ class BlueEmojiBar(IncrementalBar):
 
 
 class DownloadProgressMixin:
-
     def __init__(self, *args, **kwargs):
         # type: (List[Any], Dict[Any, Any]) -> None
         # https://github.com/python/mypy/issues/5887
         super().__init__(*args, **kwargs)  # type: ignore
-        self.message = (" " * (
-            get_indentation() + 2
-        )) + self.message  # type: str
+        self.message = (" " * (get_indentation() + 2)) + self.message  # type: str
 
     @property
     def downloaded(self):
@@ -162,7 +158,6 @@ class DownloadProgressMixin:
 
 
 class WindowsMixin:
-
     def __init__(self, *args, **kwargs):
         # type: (List[Any], Dict[Any, Any]) -> None
         # The Windows terminal does not support the hide/show cursor ANSI codes
@@ -192,16 +187,14 @@ class WindowsMixin:
             self.file.flush = lambda: self.file.wrapped.flush()
 
 
-class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin,
-                              DownloadProgressMixin):
+class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin, DownloadProgressMixin):
 
     file = sys.stdout
     message = "%(percent)d%%"
     suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s"
 
 
-class DefaultDownloadProgressBar(BaseDownloadProgressBar,
-                                 _BaseBar):
+class DefaultDownloadProgressBar(BaseDownloadProgressBar, _BaseBar):
     pass
 
 
@@ -209,23 +202,21 @@ class DownloadSilentBar(BaseDownloadProgressBar, SilentBar):
     pass
 
 
-class DownloadBar(BaseDownloadProgressBar,
-                  Bar):
+class DownloadBar(BaseDownloadProgressBar, Bar):
     pass
 
 
-class DownloadFillingCirclesBar(BaseDownloadProgressBar,
-                                FillingCirclesBar):
+class DownloadFillingCirclesBar(BaseDownloadProgressBar, FillingCirclesBar):
     pass
 
 
-class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar,
-                                   BlueEmojiBar):
+class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar, BlueEmojiBar):
     pass
 
 
-class DownloadProgressSpinner(WindowsMixin, InterruptibleMixin,
-                              DownloadProgressMixin, Spinner):
+class DownloadProgressSpinner(
+    WindowsMixin, InterruptibleMixin, DownloadProgressMixin, Spinner
+):
 
     file = sys.stdout
     suffix = "%(downloaded)s %(download_speed)s"
@@ -241,13 +232,15 @@ class DownloadProgressSpinner(WindowsMixin, InterruptibleMixin,
         message = self.message % self
         phase = self.next_phase()
         suffix = self.suffix % self
-        line = ''.join([
-            message,
-            " " if message else "",
-            phase,
-            " " if suffix else "",
-            suffix,
-        ])
+        line = "".join(
+            [
+                message,
+                " " if message else "",
+                phase,
+                " " if suffix else "",
+                suffix,
+            ]
+        )
 
         self.writeln(line)
 
@@ -257,7 +250,7 @@ BAR_TYPES = {
     "on": (DefaultDownloadProgressBar, DownloadProgressSpinner),
     "ascii": (DownloadBar, DownloadProgressSpinner),
     "pretty": (DownloadFillingCirclesBar, DownloadProgressSpinner),
-    "emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner)
+    "emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner),
 }
 
 
diff --git a/src/pip/_internal/cli/req_command.py b/src/pip/_internal/cli/req_command.py
index 3cb8ab001..3fc00d4f4 100644
--- a/src/pip/_internal/cli/req_command.py
+++ b/src/pip/_internal/cli/req_command.py
@@ -7,6 +7,7 @@ PackageFinder machinery and all its vendored dependencies, etc.
 
 import logging
 import os
+import sys
 from functools import partial
 from optparse import Values
 from typing import Any, List, Optional, Tuple
@@ -38,6 +39,7 @@ from pip._internal.utils.temp_dir import (
     TempDirectoryTypeRegistry,
     tempdir_kinds,
 )
+from pip._internal.utils.virtualenv import running_under_virtualenv
 
 logger = logging.getLogger(__name__)
 
@@ -47,6 +49,7 @@ class SessionCommandMixin(CommandContextMixIn):
     """
     A class mixin for command classes needing _build_session().
     """
+
     def __init__(self):
         # type: () -> None
         super().__init__()
@@ -83,8 +86,7 @@ class SessionCommandMixin(CommandContextMixIn):
         assert not options.cache_dir or os.path.isabs(options.cache_dir)
         session = PipSession(
             cache=(
-                os.path.join(options.cache_dir, "http")
-                if options.cache_dir else None
+                os.path.join(options.cache_dir, "http") if options.cache_dir else None
             ),
             retries=retries if retries is not None else options.retries,
             trusted_hosts=options.trusted_hosts,
@@ -101,9 +103,7 @@ class SessionCommandMixin(CommandContextMixIn):
 
         # Handle timeouts
         if options.timeout or timeout:
-            session.timeout = (
-                timeout if timeout is not None else options.timeout
-            )
+            session.timeout = timeout if timeout is not None else options.timeout
 
         # Handle configured proxies
         if options.proxy:
@@ -134,16 +134,14 @@ class IndexGroupCommand(Command, SessionCommandMixin):
         This overrides the default behavior of not doing the check.
         """
         # Make sure the index_group options are present.
-        assert hasattr(options, 'no_index')
+        assert hasattr(options, "no_index")
 
         if options.disable_pip_version_check or options.no_index:
             return
 
         # Otherwise, check if we're using the latest version of pip available.
         session = self._build_session(
-            options,
-            retries=0,
-            timeout=min(5, options.timeout)
+            options, retries=0, timeout=min(5, options.timeout)
         )
         with session:
             pip_self_version_check(session, options)
@@ -156,11 +154,41 @@ KEEPABLE_TEMPDIR_TYPES = [
 ]
 
 
+def warn_if_run_as_root():
+    # type: () -> None
+    """Output a warning for sudo users on Unix.
+
+    In a virtual environment, sudo pip still writes to virtualenv.
+    On Windows, users may run pip as Administrator without issues.
+    This warning only applies to Unix root users outside of virtualenv.
+    """
+    if running_under_virtualenv():
+        return
+    if not hasattr(os, "getuid"):
+        return
+    # On Windows, there are no "system managed" Python packages. Installing as
+    # Administrator via pip is the correct way of updating system environments.
+    #
+    # We choose sys.platform over utils.compat.WINDOWS here to enable Mypy platform
+    # checks: https://mypy.readthedocs.io/en/stable/common_issues.html
+    if sys.platform == "win32" or sys.platform == "cygwin":
+        return
+    if sys.platform == "darwin" or sys.platform == "linux":
+        if os.getuid() != 0:
+            return
+    logger.warning(
+        "Running pip as root will break packages and permissions. "
+        "You should install packages reliably by using venv: "
+        "https://pip.pypa.io/warnings/venv"
+    )
+
+
 def with_cleanup(func):
     # type: (Any) -> Any
     """Decorator for common logic related to managing temporary
     directories.
     """
+
     def configure_tempdir_registry(registry):
         # type: (TempDirectoryTypeRegistry) -> None
         for t in KEEPABLE_TEMPDIR_TYPES:
@@ -185,7 +213,6 @@ def with_cleanup(func):
 
 
 class RequirementCommand(IndexGroupCommand):
-
     def __init__(self, *args, **kw):
         # type: (Any, Any) -> None
         super().__init__(*args, **kw)
@@ -204,13 +231,13 @@ class RequirementCommand(IndexGroupCommand):
     @classmethod
     def make_requirement_preparer(
         cls,
-        temp_build_dir,           # type: TempDirectory
-        options,                  # type: Values
-        req_tracker,              # type: RequirementTracker
-        session,                  # type: PipSession
-        finder,                   # type: PackageFinder
-        use_user_site,            # type: bool
-        download_dir=None,        # type: str
+        temp_build_dir,  # type: TempDirectory
+        options,  # type: Values
+        req_tracker,  # type: RequirementTracker
+        session,  # type: PipSession
+        finder,  # type: PackageFinder
+        use_user_site,  # type: bool
+        download_dir=None,  # type: str
     ):
         # type: (...) -> RequirementPreparer
         """
@@ -221,20 +248,20 @@ class RequirementCommand(IndexGroupCommand):
 
         resolver_variant = cls.determine_resolver_variant(options)
         if resolver_variant == "2020-resolver":
-            lazy_wheel = 'fast-deps' in options.features_enabled
+            lazy_wheel = "fast-deps" in options.features_enabled
             if lazy_wheel:
                 logger.warning(
-                    'pip is using lazily downloaded wheels using HTTP '
-                    'range requests to obtain dependency information. '
-                    'This experimental feature is enabled through '
-                    '--use-feature=fast-deps and it is not ready for '
-                    'production.'
+                    "pip is using lazily downloaded wheels using HTTP "
+                    "range requests to obtain dependency information. "
+                    "This experimental feature is enabled through "
+                    "--use-feature=fast-deps and it is not ready for "
+                    "production."
                 )
         else:
             lazy_wheel = False
-            if 'fast-deps' in options.features_enabled:
+            if "fast-deps" in options.features_enabled:
                 logger.warning(
-                    'fast-deps has no effect when used with the legacy resolver.'
+                    "fast-deps has no effect when used with the legacy resolver."
                 )
 
         return RequirementPreparer(
@@ -249,22 +276,23 @@ class RequirementCommand(IndexGroupCommand):
             require_hashes=options.require_hashes,
             use_user_site=use_user_site,
             lazy_wheel=lazy_wheel,
+            in_tree_build="in-tree-build" in options.features_enabled,
         )
 
     @classmethod
     def make_resolver(
         cls,
-        preparer,                            # type: RequirementPreparer
-        finder,                              # type: PackageFinder
-        options,                             # type: Values
-        wheel_cache=None,                    # type: Optional[WheelCache]
-        use_user_site=False,                 # type: bool
-        ignore_installed=True,               # type: bool
-        ignore_requires_python=False,        # type: bool
-        force_reinstall=False,               # type: bool
+        preparer,  # type: RequirementPreparer
+        finder,  # type: PackageFinder
+        options,  # type: Values
+        wheel_cache=None,  # type: Optional[WheelCache]
+        use_user_site=False,  # type: bool
+        ignore_installed=True,  # type: bool
+        ignore_requires_python=False,  # type: bool
+        force_reinstall=False,  # type: bool
         upgrade_strategy="to-satisfy-only",  # type: str
-        use_pep517=None,                     # type: Optional[bool]
-        py_version_info=None,                # type: Optional[Tuple[int, ...]]
+        use_pep517=None,  # type: Optional[bool]
+        py_version_info=None,  # type: Optional[Tuple[int, ...]]
     ):
         # type: (...) -> BaseResolver
         """
@@ -296,6 +324,7 @@ class RequirementCommand(IndexGroupCommand):
                 py_version_info=py_version_info,
             )
         import pip._internal.resolution.legacy.resolver
+
         return pip._internal.resolution.legacy.resolver.Resolver(
             preparer=preparer,
             finder=finder,
@@ -312,10 +341,10 @@ class RequirementCommand(IndexGroupCommand):
 
     def get_requirements(
         self,
-        args,             # type: List[str]
-        options,          # type: Values
-        finder,           # type: PackageFinder
-        session,          # type: PipSession
+        args,  # type: List[str]
+        options,  # type: Values
+        finder,  # type: PackageFinder
+        session,  # type: PipSession
     ):
         # type: (...) -> List[InstallRequirement]
         """
@@ -324,9 +353,12 @@ class RequirementCommand(IndexGroupCommand):
         requirements = []  # type: List[InstallRequirement]
         for filename in options.constraints:
             for parsed_req in parse_requirements(
-                    filename,
-                    constraint=True, finder=finder, options=options,
-                    session=session):
+                filename,
+                constraint=True,
+                finder=finder,
+                options=options,
+                session=session,
+            ):
                 req_to_add = install_req_from_parsed_requirement(
                     parsed_req,
                     isolated=options.isolated_mode,
@@ -336,7 +368,9 @@ class RequirementCommand(IndexGroupCommand):
 
         for req in args:
             req_to_add = install_req_from_line(
-                req, None, isolated=options.isolated_mode,
+                req,
+                None,
+                isolated=options.isolated_mode,
                 use_pep517=options.use_pep517,
                 user_supplied=True,
             )
@@ -354,8 +388,8 @@ class RequirementCommand(IndexGroupCommand):
         # NOTE: options.require_hashes may be set if --require-hashes is True
         for filename in options.requirements:
             for parsed_req in parse_requirements(
-                    filename,
-                    finder=finder, options=options, session=session):
+                filename, finder=finder, options=options, session=session
+            ):
                 req_to_add = install_req_from_parsed_requirement(
                     parsed_req,
                     isolated=options.isolated_mode,
@@ -369,16 +403,19 @@ class RequirementCommand(IndexGroupCommand):
             options.require_hashes = True
 
         if not (args or options.editables or options.requirements):
-            opts = {'name': self.name}
+            opts = {"name": self.name}
             if options.find_links:
                 raise CommandError(
-                    'You must give at least one requirement to {name} '
+                    "You must give at least one requirement to {name} "
                     '(maybe you meant "pip {name} {links}"?)'.format(
-                        **dict(opts, links=' '.join(options.find_links))))
+                        **dict(opts, links=" ".join(options.find_links))
+                    )
+                )
             else:
                 raise CommandError(
-                    'You must give at least one requirement to {name} '
-                    '(see "pip help {name}")'.format(**opts))
+                    "You must give at least one requirement to {name} "
+                    '(see "pip help {name}")'.format(**opts)
+                )
 
         return requirements
 
@@ -396,9 +433,9 @@ class RequirementCommand(IndexGroupCommand):
 
     def _build_package_finder(
         self,
-        options,               # type: Values
-        session,               # type: PipSession
-        target_python=None,    # type: Optional[TargetPython]
+        options,  # type: Values
+        session,  # type: PipSession
+        target_python=None,  # type: Optional[TargetPython]
         ignore_requires_python=None,  # type: Optional[bool]
     ):
         # type: (...) -> PackageFinder
diff --git a/src/pip/_internal/cli/spinners.py b/src/pip/_internal/cli/spinners.py
index 13984d3c4..08e156617 100644
--- a/src/pip/_internal/cli/spinners.py
+++ b/src/pip/_internal/cli/spinners.py
@@ -24,9 +24,14 @@ class SpinnerInterface:
 
 
 class InteractiveSpinner(SpinnerInterface):
-    def __init__(self, message, file=None, spin_chars="-\\|/",
-                 # Empirically, 8 updates/second looks nice
-                 min_update_interval_seconds=0.125):
+    def __init__(
+        self,
+        message,
+        file=None,
+        spin_chars="-\\|/",
+        # Empirically, 8 updates/second looks nice
+        min_update_interval_seconds=0.125,
+    ):
         # type: (str, IO[str], str, float) -> None
         self._message = message
         if file is None:
@@ -101,8 +106,7 @@ class NonInteractiveSpinner(SpinnerInterface):
         # type: (str) -> None
         if self._finished:
             return
-        self._update(
-            "finished with status '{final_status}'".format(**locals()))
+        self._update(f"finished with status '{final_status}'")
         self._finished = True
 
 
diff --git a/src/pip/_internal/commands/debug.py b/src/pip/_internal/commands/debug.py
index 480c0444d..ead5119a2 100644
--- a/src/pip/_internal/commands/debug.py
+++ b/src/pip/_internal/commands/debug.py
@@ -4,7 +4,7 @@ import os
 import sys
 from optparse import Values
 from types import ModuleType
-from typing import Dict, List, Optional
+from typing import Any, Dict, List, Optional
 
 import pip._vendor
 from pip._vendor.certifi import where
@@ -24,7 +24,7 @@ logger = logging.getLogger(__name__)
 
 
 def show_value(name, value):
-    # type: (str, Optional[str]) -> None
+    # type: (str, Any) -> None
     logger.info('%s: %s', name, value)
 
 
diff --git a/src/pip/_internal/commands/install.py b/src/pip/_internal/commands/install.py
index 78cd0b5cf..dc637d876 100644
--- a/src/pip/_internal/commands/install.py
+++ b/src/pip/_internal/commands/install.py
@@ -12,10 +12,14 @@ from pip._vendor.packaging.utils import canonicalize_name
 from pip._internal.cache import WheelCache
 from pip._internal.cli import cmdoptions
 from pip._internal.cli.cmdoptions import make_target_python
-from pip._internal.cli.req_command import RequirementCommand, with_cleanup
+from pip._internal.cli.req_command import (
+    RequirementCommand,
+    warn_if_run_as_root,
+    with_cleanup,
+)
 from pip._internal.cli.status_codes import ERROR, SUCCESS
 from pip._internal.exceptions import CommandError, InstallationError
-from pip._internal.locations import distutils_scheme
+from pip._internal.locations import get_scheme
 from pip._internal.metadata import get_environment
 from pip._internal.models.format_control import FormatControl
 from pip._internal.operations.check import ConflictDetails, check_install_conflicts
@@ -45,7 +49,7 @@ def get_check_binary_allowed(format_control):
     # type: (FormatControl) -> BinaryAllowedPredicate
     def check_binary_allowed(req):
         # type: (InstallRequirement) -> bool
-        canonical_name = canonicalize_name(req.name)
+        canonical_name = canonicalize_name(req.name or "")
         allowed_formats = format_control.get_allowed_formats(canonical_name)
         return "binary" in allowed_formats
 
@@ -443,6 +447,7 @@ class InstallCommand(RequirementCommand):
                 options.target_dir, target_temp_dir, options.upgrade
             )
 
+        warn_if_run_as_root()
         return SUCCESS
 
     def _handle_target_dir(self, target_dir, target_temp_dir, upgrade):
@@ -455,10 +460,10 @@ class InstallCommand(RequirementCommand):
 
         # Checking both purelib and platlib directories for installed
         # packages to be moved to target directory
-        scheme = distutils_scheme('', home=target_temp_dir.path)
-        purelib_dir = scheme['purelib']
-        platlib_dir = scheme['platlib']
-        data_dir = scheme['data']
+        scheme = get_scheme('', home=target_temp_dir.path)
+        purelib_dir = scheme.purelib
+        platlib_dir = scheme.platlib
+        data_dir = scheme.data
 
         if os.path.exists(purelib_dir):
             lib_dir_list.append(purelib_dir)
@@ -574,9 +579,15 @@ def get_lib_location_guesses(
         prefix=None  # type: Optional[str]
 ):
     # type:(...) -> List[str]
-    scheme = distutils_scheme('', user=user, home=home, root=root,
-                              isolated=isolated, prefix=prefix)
-    return [scheme['purelib'], scheme['platlib']]
+    scheme = get_scheme(
+        '',
+        user=user,
+        home=home,
+        root=root,
+        isolated=isolated,
+        prefix=prefix,
+    )
+    return [scheme.purelib, scheme.platlib]
 
 
 def site_packages_writable(root, isolated):
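
For reference, a sketch of how the helper above reads paths after the switch from distutils_scheme() to get_scheme(); the prefix value is made up and the printed paths vary by platform:

    from pip._internal.locations import get_scheme

    # Previously: scheme = distutils_scheme('', prefix='/opt/app')
    #             libs = [scheme['purelib'], scheme['platlib']]
    scheme = get_scheme('', prefix='/opt/app')
    libs = [scheme.purelib, scheme.platlib]  # attribute access on a Scheme object
    print(libs)  # e.g. ['/opt/app/lib/python3.9/site-packages', ...]
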
diff --git a/src/pip/_internal/commands/search.py b/src/pip/_internal/commands/search.py
index 6fcc9354f..d66e82347 100644
--- a/src/pip/_internal/commands/search.py
+++ b/src/pip/_internal/commands/search.py
@@ -2,16 +2,13 @@ import logging
 import shutil
 import sys
 import textwrap
+import xmlrpc.client
 from collections import OrderedDict
 from optparse import Values
 from typing import TYPE_CHECKING, Dict, List, Optional
 
 from pip._vendor.packaging.version import parse as parse_version
 
-# NOTE: XMLRPC Client is not annotated in typeshed as on 2017-07-17, which is
-#       why we ignore the type on this import
-from pip._vendor.six.moves import xmlrpc_client  # type: ignore
-
 from pip._internal.cli.base_command import Command
 from pip._internal.cli.req_command import SessionCommandMixin
 from pip._internal.cli.status_codes import NO_MATCHES_FOUND, SUCCESS
@@ -25,10 +22,10 @@ from pip._internal.utils.misc import write_output
 if TYPE_CHECKING:
     from typing import TypedDict
 
-    TransformedHit = TypedDict(
-        'TransformedHit',
-        {'name': str, 'summary': str, 'versions': List[str]},
-    )
+    class TransformedHit(TypedDict):
+        name: str
+        summary: str
+        versions: List[str]
 
 logger = logging.getLogger(__name__)
 
@@ -75,15 +72,16 @@ class SearchCommand(Command, SessionCommandMixin):
         session = self.get_default_session(options)
 
         transport = PipXmlrpcTransport(index_url, session)
-        pypi = xmlrpc_client.ServerProxy(index_url, transport)
+        pypi = xmlrpc.client.ServerProxy(index_url, transport)
         try:
             hits = pypi.search({'name': query, 'summary': query}, 'or')
-        except xmlrpc_client.Fault as fault:
+        except xmlrpc.client.Fault as fault:
             message = "XMLRPC request failed [code: {code}]\n{string}".format(
                 code=fault.faultCode,
                 string=fault.faultString,
             )
             raise CommandError(message)
+        assert isinstance(hits, list)
         return hits
 
 
@@ -139,9 +137,8 @@ def print_results(hits, name_column_width=None, terminal_width=None):
                 summary = ('\n' + ' ' * (name_column_width + 3)).join(
                     summary_lines)
 
-        line = '{name_latest:{name_column_width}} - {summary}'.format(
-            name_latest='{name} ({latest})'.format(**locals()),
-            **locals())
+        name_latest = f'{name} ({latest})'
+        line = f'{name_latest:{name_column_width}} - {summary}'
         try:
             write_output(line)
             dist = env.get_distribution(name)
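
The rewritten line above leans on nested replacement fields in f-strings (the column width itself is a variable); a tiny illustration with made-up values:

    name = "pip"
    latest = "21.0.1"
    summary = "The PyPA recommended tool for installing Python packages."
    name_column_width = 20

    name_latest = f'{name} ({latest})'
    print(f'{name_latest:{name_column_width}} - {summary}')
    # "pip (21.0.1)" padded to 20 characters, then " - " and the summary.
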
diff --git a/src/pip/_internal/commands/uninstall.py b/src/pip/_internal/commands/uninstall.py
index d01fde9fe..9a3c9f881 100644
--- a/src/pip/_internal/commands/uninstall.py
+++ b/src/pip/_internal/commands/uninstall.py
@@ -4,7 +4,7 @@ from typing import List
 from pip._vendor.packaging.utils import canonicalize_name
 
 from pip._internal.cli.base_command import Command
-from pip._internal.cli.req_command import SessionCommandMixin
+from pip._internal.cli.req_command import SessionCommandMixin, warn_if_run_as_root
 from pip._internal.cli.status_codes import SUCCESS
 from pip._internal.exceptions import InstallationError
 from pip._internal.req import parse_requirements
@@ -73,8 +73,8 @@ class UninstallCommand(Command, SessionCommandMixin):
                     reqs_to_uninstall[canonicalize_name(req.name)] = req
         if not reqs_to_uninstall:
             raise InstallationError(
-                'You must give at least one requirement to {self.name} (see '
-                '"pip help {self.name}")'.format(**locals())
+                f'You must give at least one requirement to {self.name} (see '
+                f'"pip help {self.name}")'
             )
 
         protect_pip_from_modification_on_windows(
@@ -88,4 +88,5 @@ class UninstallCommand(Command, SessionCommandMixin):
             if uninstall_pathset:
                 uninstall_pathset.commit()
 
+        warn_if_run_as_root()
         return SUCCESS
diff --git a/src/pip/_internal/distributions/__init__.py b/src/pip/_internal/distributions/__init__.py
index 75bea848d..a222f248f 100644
--- a/src/pip/_internal/distributions/__init__.py
+++ b/src/pip/_internal/distributions/__init__.py
@@ -6,8 +6,7 @@ from pip._internal.req.req_install import InstallRequirement
 
 def make_distribution_for_install_requirement(install_req):
     # type: (InstallRequirement) -> AbstractDistribution
-    """Returns a Distribution for the given InstallRequirement
-    """
+    """Returns a Distribution for the given InstallRequirement"""
     # Editable requirements will always be source distributions. They use the
     # legacy logic until we create a modern standard for them.
     if install_req.editable:
diff --git a/src/pip/_internal/distributions/base.py b/src/pip/_internal/distributions/base.py
index 1798286ed..78ee91e76 100644
--- a/src/pip/_internal/distributions/base.py
+++ b/src/pip/_internal/distributions/base.py
@@ -22,6 +22,7 @@ class AbstractDistribution(metaclass=abc.ABCMeta):
      - we must be able to create a Distribution object exposing the
        above metadata.
     """
+
     def __init__(self, req):
         # type: (InstallRequirement) -> None
         super().__init__()
diff --git a/src/pip/_internal/distributions/sdist.py b/src/pip/_internal/distributions/sdist.py
index 28249076c..c873a9f10 100644
--- a/src/pip/_internal/distributions/sdist.py
+++ b/src/pip/_internal/distributions/sdist.py
@@ -46,10 +46,10 @@ class SourceDistribution(AbstractDistribution):
             error_message = format_string.format(
                 requirement=self.req,
                 conflicting_with=conflicting_with,
-                description=', '.join(
-                    f'{installed} is incompatible with {wanted}'
+                description=", ".join(
+                    f"{installed} is incompatible with {wanted}"
                     for installed, wanted in sorted(conflicting)
-                )
+                ),
             )
             raise InstallationError(error_message)
 
@@ -60,15 +60,13 @@ class SourceDistribution(AbstractDistribution):
 
         self.req.build_env = BuildEnvironment()
         self.req.build_env.install_requirements(
-            finder, pyproject_requires, 'overlay',
-            "Installing build dependencies"
+            finder, pyproject_requires, "overlay", "Installing build dependencies"
         )
         conflicting, missing = self.req.build_env.check_requirements(
             self.req.requirements_to_check
         )
         if conflicting:
-            _raise_conflicts("PEP 517/518 supported requirements",
-                             conflicting)
+            _raise_conflicts("PEP 517/518 supported requirements", conflicting)
         if missing:
             logger.warning(
                 "Missing build requirements in pyproject.toml for %s.",
@@ -77,15 +75,13 @@ class SourceDistribution(AbstractDistribution):
             logger.warning(
                 "The project does not specify a build backend, and "
                 "pip cannot fall back to setuptools without %s.",
-                " and ".join(map(repr, sorted(missing)))
+                " and ".join(map(repr, sorted(missing))),
             )
         # Install any extra build dependencies that the backend requests.
         # This must be done in a second pass, as the pyproject.toml
         # dependencies must be installed before we can call the backend.
         with self.req.build_env:
-            runner = runner_with_spinner_message(
-                "Getting requirements to build wheel"
-            )
+            runner = runner_with_spinner_message("Getting requirements to build wheel")
             backend = self.req.pep517_backend
             assert backend is not None
             with backend.subprocess_runner(runner):
@@ -95,6 +91,5 @@ class SourceDistribution(AbstractDistribution):
         if conflicting:
             _raise_conflicts("the backend dependencies", conflicting)
         self.req.build_env.install_requirements(
-            finder, missing, 'normal',
-            "Installing backend dependencies"
+            finder, missing, "normal", "Installing backend dependencies"
         )
diff --git a/src/pip/_internal/exceptions.py b/src/pip/_internal/exceptions.py
index 01ee4b769..8aacf8120 100644
--- a/src/pip/_internal/exceptions.py
+++ b/src/pip/_internal/exceptions.py
@@ -59,6 +59,21 @@ class NoneMetadataError(PipError):
         )
 
 
+class UserInstallationInvalid(InstallationError):
+    """A --user install is requested on an environment without user site."""
+
+    def __str__(self):
+        # type: () -> str
+        return "User base directory is not specified"
+
+
+class InvalidSchemeCombination(InstallationError):
+    def __str__(self):
+        # type: () -> str
+        before = ", ".join(str(a) for a in self.args[:-1])
+        return f"Cannot set {before} and {self.args[-1]} together"
+
+
 class DistributionNotFound(InstallationError):
     """Raised when a distribution cannot be found to satisfy a requirement"""
 
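A minimal sketch of how the new InvalidSchemeCombination message is assembled from its positional arguments (the option strings are just examples):

    from pip._internal.exceptions import InvalidSchemeCombination

    print(InvalidSchemeCombination("--user", "--prefix"))
    # Cannot set --user and --prefix together
    print(InvalidSchemeCombination("--user", "--home", "--prefix"))
    # Cannot set --user, --home and --prefix together
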
diff --git a/src/pip/_internal/index/collector.py b/src/pip/_internal/index/collector.py
index 3dd6c7df4..f6b0536d0 100644
--- a/src/pip/_internal/index/collector.py
+++ b/src/pip/_internal/index/collector.py
@@ -4,6 +4,7 @@ The main purpose of this module is to expose LinkCollector.collect_links().
 
 import cgi
 import functools
+import html
 import itertools
 import logging
 import mimetypes
@@ -26,7 +27,6 @@ from typing import (
 )
 
 from pip._vendor import html5lib, requests
-from pip._vendor.distlib.compat import unescape
 from pip._vendor.requests import Response
 from pip._vendor.requests.exceptions import RetryError, SSLError
 
@@ -261,12 +261,11 @@ def _create_link_from_element(
 
     url = _clean_link(urllib.parse.urljoin(base_url, href))
     pyrequire = anchor.get('data-requires-python')
-    pyrequire = unescape(pyrequire) if pyrequire else None
+    pyrequire = html.unescape(pyrequire) if pyrequire else None
 
     yanked_reason = anchor.get('data-yanked')
     if yanked_reason:
-        # This is a unicode string in Python 2 (and 3).
-        yanked_reason = unescape(yanked_reason)
+        yanked_reason = html.unescape(yanked_reason)
 
     link = Link(
         url,
diff --git a/src/pip/_internal/index/package_finder.py b/src/pip/_internal/index/package_finder.py
index b826690fa..a6423cce1 100644
--- a/src/pip/_internal/index/package_finder.py
+++ b/src/pip/_internal/index/package_finder.py
@@ -44,7 +44,7 @@ logger = logging.getLogger(__name__)
 
 BuildTag = Union[Tuple[()], Tuple[int, str]]
 CandidateSortingKey = (
-    Tuple[int, int, int, _BaseVersion, BuildTag, Optional[int]]
+    Tuple[int, int, int, _BaseVersion, Optional[int], BuildTag]
 )
 
 
@@ -434,6 +434,12 @@ class CandidateEvaluator:
         self._project_name = project_name
         self._specifier = specifier
         self._supported_tags = supported_tags
+        # Since the index of the tag in the _supported_tags list is used
+        # as a priority, precompute a map from tag to index/priority to be
+        # used in wheel.find_most_preferred_tag.
+        self._wheel_tag_preferences = {
+            tag: idx for idx, tag in enumerate(supported_tags)
+        }
 
     def get_applicable_candidates(
         self,
@@ -512,14 +518,17 @@ class CandidateEvaluator:
         if link.is_wheel:
             # can raise InvalidWheelFilename
             wheel = Wheel(link.filename)
-            if not wheel.supported(valid_tags):
+            try:
+                pri = -(wheel.find_most_preferred_tag(
+                    valid_tags, self._wheel_tag_preferences
+                ))
+            except ValueError:
                 raise UnsupportedWheel(
                     "{} is not a supported wheel for this platform. It "
                     "can't be sorted.".format(wheel.filename)
                 )
             if self._prefer_binary:
                 binary_preference = 1
-            pri = -(wheel.support_index_min(valid_tags))
             if wheel.build_tag is not None:
                 match = re.match(r'^(\d+)(.*)$', wheel.build_tag)
                 build_tag_groups = match.groups()
@@ -530,7 +539,7 @@ class CandidateEvaluator:
         yank_value = -1 * int(link.is_yanked)  # -1 for yanked.
         return (
             has_allowed_hash, yank_value, binary_preference, candidate.version,
-            build_tag, pri,
+            pri, build_tag,
         )
 
     def sort_best_candidate(
diff --git a/src/pip/_internal/locations/__init__.py b/src/pip/_internal/locations/__init__.py
new file mode 100644
index 000000000..18bf0319f
--- /dev/null
+++ b/src/pip/_internal/locations/__init__.py
@@ -0,0 +1,184 @@
+import logging
+import pathlib
+import sys
+import sysconfig
+from typing import List, Optional
+
+from pip._internal.models.scheme import SCHEME_KEYS, Scheme
+
+from . import _distutils, _sysconfig
+from .base import (
+    USER_CACHE_DIR,
+    get_major_minor_version,
+    get_src_prefix,
+    site_packages,
+    user_site,
+)
+
+__all__ = [
+    "USER_CACHE_DIR",
+    "get_bin_prefix",
+    "get_bin_user",
+    "get_major_minor_version",
+    "get_platlib",
+    "get_prefixed_libs",
+    "get_purelib",
+    "get_scheme",
+    "get_src_prefix",
+    "site_packages",
+    "user_site",
+]
+
+
+logger = logging.getLogger(__name__)
+
+
+def _default_base(*, user: bool) -> str:
+    if user:
+        base = sysconfig.get_config_var("userbase")
+    else:
+        base = sysconfig.get_config_var("base")
+    assert base is not None
+    return base
+
+
+def _warn_if_mismatch(old: pathlib.Path, new: pathlib.Path, *, key: str) -> bool:
+    if old == new:
+        return False
+    issue_url = "https://github.com/pypa/pip/issues/9617"
+    message = (
+        "Value for %s does not match. Please report this to <%s>"
+        "\ndistutils: %s"
+        "\nsysconfig: %s"
+    )
+    logger.warning(message, key, issue_url, old, new)
+    return True
+
+
+def _log_context(
+    *,
+    user: bool = False,
+    home: Optional[str] = None,
+    root: Optional[str] = None,
+    prefix: Optional[str] = None,
+) -> None:
+    message = (
+        "Additional context:" "\nuser = %r" "\nhome = %r" "\nroot = %r" "\nprefix = %r"
+    )
+    logger.warning(message, user, home, root, prefix)
+
+
+def get_scheme(
+    dist_name,  # type: str
+    user=False,  # type: bool
+    home=None,  # type: Optional[str]
+    root=None,  # type: Optional[str]
+    isolated=False,  # type: bool
+    prefix=None,  # type: Optional[str]
+):
+    # type: (...) -> Scheme
+    old = _distutils.get_scheme(
+        dist_name,
+        user=user,
+        home=home,
+        root=root,
+        isolated=isolated,
+        prefix=prefix,
+    )
+    new = _sysconfig.get_scheme(
+        dist_name,
+        user=user,
+        home=home,
+        root=root,
+        isolated=isolated,
+        prefix=prefix,
+    )
+
+    base = prefix or home or _default_base(user=user)
+    warned = []
+    for k in SCHEME_KEYS:
+        # Extra join because distutils can return relative paths.
+        old_v = pathlib.Path(base, getattr(old, k))
+        new_v = pathlib.Path(getattr(new, k))
+
+        # distutils incorrectly put PyPy packages under ``site-packages/python``
+        # in the ``posix_home`` scheme, but PyPy devs said they expect the
+        # directory name to be ``pypy`` instead. So we treat this as a bug fix
+        # and not warn about it. See bpo-43307 and python/cpython#24628.
+        skip_pypy_special_case = (
+            sys.implementation.name == "pypy"
+            and home is not None
+            and k in ("platlib", "purelib")
+            and old_v.parent == new_v.parent
+            and old_v.name == "python"
+            and new_v.name == "pypy"
+        )
+        if skip_pypy_special_case:
+            continue
+
+        warned.append(_warn_if_mismatch(old_v, new_v, key=f"scheme.{k}"))
+
+    if any(warned):
+        _log_context(user=user, home=home, root=root, prefix=prefix)
+
+    return old
+
+
+def get_bin_prefix():
+    # type: () -> str
+    old = _distutils.get_bin_prefix()
+    new = _sysconfig.get_bin_prefix()
+    if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="bin_prefix"):
+        _log_context()
+    return old
+
+
+def get_bin_user():
+    # type: () -> str
+    return _sysconfig.get_scheme("", user=True).scripts
+
+
+def get_purelib():
+    # type: () -> str
+    """Return the default pure-Python lib location."""
+    old = _distutils.get_purelib()
+    new = _sysconfig.get_purelib()
+    if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="purelib"):
+        _log_context()
+    return old
+
+
+def get_platlib():
+    # type: () -> str
+    """Return the default platform-shared lib location."""
+    old = _distutils.get_platlib()
+    new = _sysconfig.get_platlib()
+    if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="platlib"):
+        _log_context()
+    return old
+
+
+def get_prefixed_libs(prefix):
+    # type: (str) -> List[str]
+    """Return the lib locations under ``prefix``."""
+    old_pure, old_plat = _distutils.get_prefixed_libs(prefix)
+    new_pure, new_plat = _sysconfig.get_prefixed_libs(prefix)
+
+    warned = [
+        _warn_if_mismatch(
+            pathlib.Path(old_pure),
+            pathlib.Path(new_pure),
+            key="prefixed-purelib",
+        ),
+        _warn_if_mismatch(
+            pathlib.Path(old_plat),
+            pathlib.Path(new_plat),
+            key="prefixed-platlib",
+        ),
+    ]
+    if any(warned):
+        _log_context(prefix=prefix)
+
+    if old_pure == old_plat:
+        return [old_pure]
+    return [old_pure, old_plat]
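
A short usage sketch of the new package entry point: get_scheme() computes the layout with both backends, logs a mismatch warning (plus the context block) when any path differs, and for now still returns the distutils-based answer. The empty dist name is a placeholder:

    from pip._internal.locations import get_scheme

    scheme = get_scheme("")
    print(scheme.purelib, scheme.platlib, scheme.scripts)
    # If the two backends disagree, a "Value for scheme.X does not match" warning
    # pointing at https://github.com/pypa/pip/issues/9617 is logged first.
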
diff --git a/src/pip/_internal/locations.py b/src/pip/_internal/locations/_distutils.py
similarity index 60%
rename from src/pip/_internal/locations.py
rename to src/pip/_internal/locations/_distutils.py
index 19c039eab..2d7ab7321 100644
--- a/src/pip/_internal/locations.py
+++ b/src/pip/_internal/locations/_distutils.py
@@ -4,81 +4,21 @@
 # mypy: strict-optional=False
 
 import os
-import os.path
-import site
 import sys
-import sysconfig
 from distutils.cmd import Command as DistutilsCommand
 from distutils.command.install import SCHEME_KEYS
 from distutils.command.install import install as distutils_install_command
-from typing import Dict, List, Optional, Union, cast
+from distutils.sysconfig import get_python_lib
+from typing import Dict, List, Optional, Tuple, Union, cast
 
 from pip._internal.models.scheme import Scheme
-from pip._internal.utils import appdirs
 from pip._internal.utils.compat import WINDOWS
 from pip._internal.utils.virtualenv import running_under_virtualenv
 
-# Application Directories
-USER_CACHE_DIR = appdirs.user_cache_dir("pip")
+from .base import get_major_minor_version
 
 
-def get_major_minor_version():
-    # type: () -> str
-    """
-    Return the major-minor version of the current Python as a string, e.g.
-    "3.7" or "3.10".
-    """
-    return '{}.{}'.format(*sys.version_info)
-
-
-def get_src_prefix():
-    # type: () -> str
-    if running_under_virtualenv():
-        src_prefix = os.path.join(sys.prefix, 'src')
-    else:
-        # FIXME: keep src in cwd for now (it is not a temporary folder)
-        try:
-            src_prefix = os.path.join(os.getcwd(), 'src')
-        except OSError:
-            # In case the current working directory has been renamed or deleted
-            sys.exit(
-                "The folder you are executing pip from can no longer be found."
-            )
-
-    # under macOS + virtualenv sys.prefix is not properly resolved
-    # it is something like /path/to/python/bin/..
-    return os.path.abspath(src_prefix)
-
-
-# FIXME doesn't account for venv linked to global site-packages
-
-site_packages = sysconfig.get_path("purelib")  # type: Optional[str]
-
-try:
-    # Use getusersitepackages if this is present, as it ensures that the
-    # value is initialised properly.
-    user_site = site.getusersitepackages()
-except AttributeError:
-    user_site = site.USER_SITE
-
-if WINDOWS:
-    bin_py = os.path.join(sys.prefix, 'Scripts')
-    bin_user = os.path.join(user_site, 'Scripts')
-    # buildout uses 'bin' on Windows too?
-    if not os.path.exists(bin_py):
-        bin_py = os.path.join(sys.prefix, 'bin')
-        bin_user = os.path.join(user_site, 'bin')
-else:
-    bin_py = os.path.join(sys.prefix, 'bin')
-    bin_user = os.path.join(user_site, 'bin')
-
-    # Forcing to use /usr/local/bin for standard macOS framework installs
-    # Also log to ~/Library/Logs/ for use with the Console.app log viewer
-    if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/':
-        bin_py = '/usr/local/bin'
-
-
-def distutils_scheme(
+def _distutils_scheme(
     dist_name, user=False, home=None, root=None, isolated=False, prefix=None
 ):
     # type:(str, bool, str, str, bool, str) -> Dict[str, str]
@@ -87,14 +27,14 @@ def distutils_scheme(
     """
     from distutils.dist import Distribution
 
-    dist_args = {'name': dist_name}  # type: Dict[str, Union[str, List[str]]]
+    dist_args = {"name": dist_name}  # type: Dict[str, Union[str, List[str]]]
     if isolated:
         dist_args["script_args"] = ["--no-user-cfg"]
 
     d = Distribution(dist_args)
     d.parse_config_files()
     obj = None  # type: Optional[DistutilsCommand]
-    obj = d.get_command_obj('install', create=True)
+    obj = d.get_command_obj("install", create=True)
     assert obj is not None
     i = cast(distutils_install_command, obj)
     # NOTE: setting user or home has the side-effect of creating the home dir
@@ -112,28 +52,27 @@ def distutils_scheme(
 
     scheme = {}
     for key in SCHEME_KEYS:
-        scheme[key] = getattr(i, 'install_' + key)
+        scheme[key] = getattr(i, "install_" + key)
 
     # install_lib specified in setup.cfg should install *everything*
     # into there (i.e. it takes precedence over both purelib and
     # platlib).  Note, i.install_lib is *always* set after
     # finalize_options(); we only want to override here if the user
     # has explicitly requested it hence going back to the config
-    if 'install_lib' in d.get_option_dict('install'):
+    if "install_lib" in d.get_option_dict("install"):
         scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))
 
     if running_under_virtualenv():
-        scheme['headers'] = os.path.join(
+        scheme["headers"] = os.path.join(
             i.prefix,
-            'include',
-            'site',
-            f'python{get_major_minor_version()}',
+            "include",
+            "site",
+            f"python{get_major_minor_version()}",
             dist_name,
         )
 
         if root is not None:
-            path_no_drive = os.path.splitdrive(
-                os.path.abspath(scheme["headers"]))[1]
+            path_no_drive = os.path.splitdrive(os.path.abspath(scheme["headers"]))[1]
             scheme["headers"] = os.path.join(
                 root,
                 path_no_drive[1:],
@@ -168,9 +107,7 @@ def get_scheme(
     :param prefix: indicates to use the "prefix" scheme and provides the
         base directory for the same
     """
-    scheme = distutils_scheme(
-        dist_name, user, home, root, isolated, prefix
-    )
+    scheme = _distutils_scheme(dist_name, user, home, root, isolated, prefix)
     return Scheme(
         platlib=scheme["platlib"],
         purelib=scheme["purelib"],
@@ -178,3 +115,36 @@ def get_scheme(
         scripts=scheme["scripts"],
         data=scheme["data"],
     )
+
+
+def get_bin_prefix():
+    # type: () -> str
+    if WINDOWS:
+        bin_py = os.path.join(sys.prefix, "Scripts")
+        # buildout uses 'bin' on Windows too?
+        if not os.path.exists(bin_py):
+            bin_py = os.path.join(sys.prefix, "bin")
+        return bin_py
+    # Forcing to use /usr/local/bin for standard macOS framework installs
+    # Also log to ~/Library/Logs/ for use with the Console.app log viewer
+    if sys.platform[:6] == "darwin" and sys.prefix[:16] == "/System/Library/":
+        return "/usr/local/bin"
+    return os.path.join(sys.prefix, "bin")
+
+
+def get_purelib():
+    # type: () -> str
+    return get_python_lib(plat_specific=False)
+
+
+def get_platlib():
+    # type: () -> str
+    return get_python_lib(plat_specific=True)
+
+
+def get_prefixed_libs(prefix):
+    # type: (str) -> Tuple[str, str]
+    return (
+        get_python_lib(plat_specific=False, prefix=prefix),
+        get_python_lib(plat_specific=True, prefix=prefix),
+    )
diff --git a/src/pip/_internal/locations/_sysconfig.py b/src/pip/_internal/locations/_sysconfig.py
new file mode 100644
index 000000000..e4d66d25d
--- /dev/null
+++ b/src/pip/_internal/locations/_sysconfig.py
@@ -0,0 +1,174 @@
+import distutils.util  # FIXME: For change_root.
+import logging
+import os
+import sys
+import sysconfig
+import typing
+
+from pip._internal.exceptions import InvalidSchemeCombination, UserInstallationInvalid
+from pip._internal.models.scheme import SCHEME_KEYS, Scheme
+from pip._internal.utils.virtualenv import running_under_virtualenv
+
+from .base import get_major_minor_version
+
+logger = logging.getLogger(__name__)
+
+
+# Notes on _infer_* functions.
+# Unfortunately ``_get_default_scheme()`` is private, so there's no way to
+# ask things like "what is the '_prefix' scheme on this platform". These
+# functions try to answer that with some heuristics while accounting for ad-hoc
+# platforms not covered by CPython's default sysconfig implementation. If the
+# ad-hoc implementation does not fully implement sysconfig, we'll fall back to
+# a POSIX scheme.
+
+_AVAILABLE_SCHEMES = set(sysconfig.get_scheme_names())
+
+
+def _infer_prefix():
+    # type: () -> str
+    """Try to find a prefix scheme for the current platform.
+
+    This tries:
+
+    * Implementation + OS, used by PyPy on Windows (``pypy_nt``).
+    * Implementation without OS, used by PyPy on POSIX (``pypy``).
+    * OS + "prefix", used by CPython on POSIX (``posix_prefix``).
+    * Just the OS name, used by CPython on Windows (``nt``).
+
+    If none of the above works, fall back to ``posix_prefix``.
+    """
+    implementation_suffixed = f"{sys.implementation.name}_{os.name}"
+    if implementation_suffixed in _AVAILABLE_SCHEMES:
+        return implementation_suffixed
+    if sys.implementation.name in _AVAILABLE_SCHEMES:
+        return sys.implementation.name
+    suffixed = f"{os.name}_prefix"
+    if suffixed in _AVAILABLE_SCHEMES:
+        return suffixed
+    if os.name in _AVAILABLE_SCHEMES:  # On Windows, prefix is just called "nt".
+        return os.name
+    return "posix_prefix"
+
+
+def _infer_user():
+    # type: () -> str
+    """Try to find a user scheme for the current platform."""
+    suffixed = f"{os.name}_user"
+    if suffixed in _AVAILABLE_SCHEMES:
+        return suffixed
+    if "posix_user" not in _AVAILABLE_SCHEMES:  # User scheme unavailable.
+        raise UserInstallationInvalid()
+    return "posix_user"
+
+
+def _infer_home():
+    # type: () -> str
+    """Try to find a home for the current platform."""
+    suffixed = f"{os.name}_home"
+    if suffixed in _AVAILABLE_SCHEMES:
+        return suffixed
+    return "posix_home"
+
+
+# Update these keys if the user sets a custom home.
+_HOME_KEYS = [
+    "installed_base",
+    "base",
+    "installed_platbase",
+    "platbase",
+    "prefix",
+    "exec_prefix",
+]
+if sysconfig.get_config_var("userbase") is not None:
+    _HOME_KEYS.append("userbase")
+
+
+def get_scheme(
+    dist_name,  # type: str
+    user=False,  # type: bool
+    home=None,  # type: typing.Optional[str]
+    root=None,  # type: typing.Optional[str]
+    isolated=False,  # type: bool
+    prefix=None,  # type: typing.Optional[str]
+):
+    # type: (...) -> Scheme
+    """
+    Get the "scheme" corresponding to the input parameters.
+
+    :param dist_name: the name of the package to retrieve the scheme for, used
+        in the headers scheme path
+    :param user: indicates to use the "user" scheme
+    :param home: indicates to use the "home" scheme
+    :param root: root under which other directories are re-based
+    :param isolated: ignored, but kept for distutils compatibility (where
+        this controls whether the user-site pydistutils.cfg is honored)
+    :param prefix: indicates to use the "prefix" scheme and provides the
+        base directory for the same
+    """
+    if user and prefix:
+        raise InvalidSchemeCombination("--user", "--prefix")
+    if home and prefix:
+        raise InvalidSchemeCombination("--home", "--prefix")
+
+    if home is not None:
+        scheme_name = _infer_home()
+    elif user:
+        scheme_name = _infer_user()
+    else:
+        scheme_name = _infer_prefix()
+
+    if home is not None:
+        variables = {k: home for k in _HOME_KEYS}
+    elif prefix is not None:
+        variables = {k: prefix for k in _HOME_KEYS}
+    else:
+        variables = {}
+
+    paths = sysconfig.get_paths(scheme=scheme_name, vars=variables)
+
+    # Pip historically uses a special header path in virtual environments.
+    if running_under_virtualenv():
+        if user:
+            base = variables.get("userbase", sys.prefix)
+        else:
+            base = variables.get("base", sys.prefix)
+        python_xy = f"python{get_major_minor_version()}"
+        paths["include"] = os.path.join(base, "include", "site", python_xy)
+
+    scheme = Scheme(
+        platlib=paths["platlib"],
+        purelib=paths["purelib"],
+        headers=os.path.join(paths["include"], dist_name),
+        scripts=paths["scripts"],
+        data=paths["data"],
+    )
+    if root is not None:
+        for key in SCHEME_KEYS:
+            value = distutils.util.change_root(root, getattr(scheme, key))
+            setattr(scheme, key, value)
+    return scheme
+
+
+def get_bin_prefix():
+    # type: () -> str
+    # Forcing to use /usr/local/bin for standard macOS framework installs.
+    if sys.platform[:6] == "darwin" and sys.prefix[:16] == "/System/Library/":
+        return "/usr/local/bin"
+    return sysconfig.get_paths()["scripts"]
+
+
+def get_purelib():
+    # type: () -> str
+    return sysconfig.get_paths()["purelib"]
+
+
+def get_platlib():
+    # type: () -> str
+    return sysconfig.get_paths()["platlib"]
+
+
+def get_prefixed_libs(prefix):
+    # type: (str) -> typing.Tuple[str, str]
+    paths = sysconfig.get_paths(vars={"base": prefix, "platbase": prefix})
+    return (paths["purelib"], paths["platlib"])
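
For context, a standalone sketch of the stdlib calls this module builds on; scheme names and resulting paths differ per platform and interpreter, so the printed values are only indicative:

    import sysconfig

    print(sysconfig.get_scheme_names())
    # e.g. ('nt', 'nt_user', 'osx_framework_user', 'posix_home', 'posix_prefix', 'posix_user')

    # get_scheme() picks one of these names (posix_prefix, nt, pypy, ...) and then
    # overrides the base variables, roughly like this for a --home install:
    paths = sysconfig.get_paths(scheme="posix_home", vars={"base": "/opt/myapp"})
    print(paths["purelib"])  # /opt/myapp/lib/python
    print(paths["scripts"])  # /opt/myapp/bin
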
diff --git a/src/pip/_internal/locations/base.py b/src/pip/_internal/locations/base.py
new file mode 100644
index 000000000..98557abbe
--- /dev/null
+++ b/src/pip/_internal/locations/base.py
@@ -0,0 +1,48 @@
+import os
+import site
+import sys
+import sysconfig
+import typing
+
+from pip._internal.utils import appdirs
+from pip._internal.utils.virtualenv import running_under_virtualenv
+
+# Application Directories
+USER_CACHE_DIR = appdirs.user_cache_dir("pip")
+
+# FIXME doesn't account for venv linked to global site-packages
+site_packages = sysconfig.get_path("purelib")  # type: typing.Optional[str]
+
+
+def get_major_minor_version():
+    # type: () -> str
+    """
+    Return the major-minor version of the current Python as a string, e.g.
+    "3.7" or "3.10".
+    """
+    return "{}.{}".format(*sys.version_info)
+
+
+def get_src_prefix():
+    # type: () -> str
+    if running_under_virtualenv():
+        src_prefix = os.path.join(sys.prefix, "src")
+    else:
+        # FIXME: keep src in cwd for now (it is not a temporary folder)
+        try:
+            src_prefix = os.path.join(os.getcwd(), "src")
+        except OSError:
+            # In case the current working directory has been renamed or deleted
+            sys.exit("The folder you are executing pip from can no longer be found.")
+
+    # under macOS + virtualenv sys.prefix is not properly resolved
+    # it is something like /path/to/python/bin/..
+    return os.path.abspath(src_prefix)
+
+
+try:
+    # Use getusersitepackages if this is present, as it ensures that the
+    # value is initialised properly.
+    user_site = site.getusersitepackages()  # type: typing.Optional[str]
+except AttributeError:
+    user_site = site.USER_SITE
diff --git a/src/pip/_internal/metadata/base.py b/src/pip/_internal/metadata/base.py
index 724b0c044..37f9a8232 100644
--- a/src/pip/_internal/metadata/base.py
+++ b/src/pip/_internal/metadata/base.py
@@ -1,11 +1,28 @@
-from typing import Container, Iterator, List, Optional
+import logging
+import re
+from typing import Container, Iterator, List, Optional, Union
 
-from pip._vendor.packaging.version import _BaseVersion
+from pip._vendor.packaging.version import LegacyVersion, Version
 
 from pip._internal.utils.misc import stdlib_pkgs  # TODO: Move definition here.
 
+DistributionVersion = Union[LegacyVersion, Version]
+
+logger = logging.getLogger(__name__)
+
 
 class BaseDistribution:
+    @property
+    def location(self):
+        # type: () -> Optional[str]
+        """Where the distribution is loaded from.
+
+        A string value is not necessarily a filesystem path, since distributions
+        can be loaded from other sources, e.g. arbitrary zip archives. ``None``
+        means the distribution is created in-memory.
+        """
+        raise NotImplementedError()
+
     @property
     def metadata_version(self):
         # type: () -> Optional[str]
@@ -19,7 +36,7 @@ class BaseDistribution:
 
     @property
     def version(self):
-        # type: () -> _BaseVersion
+        # type: () -> DistributionVersion
         raise NotImplementedError()
 
     @property
@@ -61,10 +78,37 @@ class BaseEnvironment:
         """Given a requirement name, return the installed distributions."""
         raise NotImplementedError()
 
+    def _iter_distributions(self):
+        # type: () -> Iterator[BaseDistribution]
+        """Iterate through installed distributions.
+
+        This function should be implemented by subclasses, but never called
+        directly. Use the public ``iter_distributions()`` instead, which
+        implements additional logic to make sure the distributions are valid.
+        """
+        raise NotImplementedError()
+
     def iter_distributions(self):
         # type: () -> Iterator[BaseDistribution]
         """Iterate through installed distributions."""
-        raise NotImplementedError()
+        for dist in self._iter_distributions():
+            # Make sure the distribution actually comes from a valid Python
+            # packaging distribution. Pip's AdjacentTempDirectory leaves folders
+            # e.g. ``~atplotlib.dist-info`` if cleanup was interrupted. The
+            # valid project name pattern is taken from PEP 508.
+            project_name_valid = re.match(
+                r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$",
+                dist.canonical_name,
+                flags=re.IGNORECASE,
+            )
+            if not project_name_valid:
+                logger.warning(
+                    "Ignoring invalid distribution %s (%s)",
+                    dist.canonical_name,
+                    dist.location,
+                )
+                continue
+            yield dist
 
     def iter_installed_distributions(
         self,
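
The project-name filter added to iter_distributions() follows the PEP 508 name grammar; a quick sketch of what it keeps and what it skips (sample names only):

    import re

    pattern = r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$"
    for name in ("pip", "typing_extensions", "zope.interface", "~atplotlib", "-broken-"):
        ok = bool(re.match(pattern, name, flags=re.IGNORECASE))
        print(name, "kept" if ok else "ignored")
    # The last two are ignored; the ~-prefixed form is what an interrupted
    # AdjacentTempDirectory cleanup leaves behind.
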
diff --git a/src/pip/_internal/metadata/pkg_resources.py b/src/pip/_internal/metadata/pkg_resources.py
index d2fb29e2e..f39a39ebe 100644
--- a/src/pip/_internal/metadata/pkg_resources.py
+++ b/src/pip/_internal/metadata/pkg_resources.py
@@ -3,13 +3,13 @@ from typing import Iterator, List, Optional
 
 from pip._vendor import pkg_resources
 from pip._vendor.packaging.utils import canonicalize_name
-from pip._vendor.packaging.version import _BaseVersion
+from pip._vendor.packaging.version import parse as parse_version
 
 from pip._internal.utils import misc  # TODO: Move definition here.
 from pip._internal.utils.packaging import get_installer
 from pip._internal.utils.wheel import pkg_resources_distribution_for_wheel
 
-from .base import BaseDistribution, BaseEnvironment
+from .base import BaseDistribution, BaseEnvironment, DistributionVersion
 
 
 class Distribution(BaseDistribution):
@@ -24,6 +24,11 @@ class Distribution(BaseDistribution):
             dist = pkg_resources_distribution_for_wheel(zf, name, path)
         return cls(dist)
 
+    @property
+    def location(self):
+        # type: () -> Optional[str]
+        return self._dist.location
+
     @property
     def metadata_version(self):
         # type: () -> Optional[str]
@@ -39,8 +44,8 @@ class Distribution(BaseDistribution):
 
     @property
     def version(self):
-        # type: () -> _BaseVersion
-        return self._dist.parsed_version
+        # type: () -> DistributionVersion
+        return parse_version(self._dist.version)
 
     @property
     def installer(self):
@@ -115,7 +120,7 @@ class Environment(BaseEnvironment):
             return None
         return self._search_distribution(name)
 
-    def iter_distributions(self):
+    def _iter_distributions(self):
         # type: () -> Iterator[BaseDistribution]
         for dist in self._ws:
             yield Distribution(dist)
diff --git a/src/pip/_internal/models/candidate.py b/src/pip/_internal/models/candidate.py
index 10a144620..3b91704a2 100644
--- a/src/pip/_internal/models/candidate.py
+++ b/src/pip/_internal/models/candidate.py
@@ -1,4 +1,3 @@
-from pip._vendor.packaging.version import _BaseVersion
 from pip._vendor.packaging.version import parse as parse_version
 
 from pip._internal.models.link import Link
@@ -14,7 +13,7 @@ class InstallationCandidate(KeyBasedCompareMixin):
     def __init__(self, name, version, link):
         # type: (str, str, Link) -> None
         self.name = name
-        self.version = parse_version(version)  # type: _BaseVersion
+        self.version = parse_version(version)
         self.link = link
 
         super().__init__(
diff --git a/src/pip/_internal/models/link.py b/src/pip/_internal/models/link.py
index d79c66a90..6f9a32443 100644
--- a/src/pip/_internal/models/link.py
+++ b/src/pip/_internal/models/link.py
@@ -111,8 +111,7 @@ class Link(KeyBasedCompareMixin):
             return netloc
 
         name = urllib.parse.unquote(name)
-        assert name, (
-            'URL {self._url!r} produced no filename'.format(**locals()))
+        assert name, f'URL {self._url!r} produced no filename'
         return name
 
     @property
diff --git a/src/pip/_internal/models/wheel.py b/src/pip/_internal/models/wheel.py
index 708bff330..c206d13cb 100644
--- a/src/pip/_internal/models/wheel.py
+++ b/src/pip/_internal/models/wheel.py
@@ -2,7 +2,7 @@
 name that have meaning.
 """
 import re
-from typing import List
+from typing import Dict, Iterable, List
 
 from pip._vendor.packaging.tags import Tag
 
@@ -66,8 +66,28 @@ class Wheel:
         """
         return min(tags.index(tag) for tag in self.file_tags if tag in tags)
 
+    def find_most_preferred_tag(self, tags, tag_to_priority):
+        # type: (List[Tag], Dict[Tag, int]) -> int
+        """Return the priority of the most preferred tag that one of the wheel's file
+        tag combinations achieves in the given list of supported tags using the given
+        tag_to_priority mapping, where lower priorities are more-preferred.
+
+        This is used in place of support_index_min in some cases in order to avoid
+        an expensive linear scan of a large list of tags.
+
+        :param tags: the PEP 425 tags to check the wheel against.
+        :param tag_to_priority: a mapping from tag to priority of that tag, where
+            lower is more preferred.
+
+        :raises ValueError: If none of the wheel's file tags match one of
+            the supported tags.
+        """
+        return min(
+            tag_to_priority[tag] for tag in self.file_tags if tag in tag_to_priority
+        )
+
     def supported(self, tags):
-        # type: (List[Tag]) -> bool
+        # type: (Iterable[Tag]) -> bool
         """Return whether the wheel is compatible with one of the given tags.
 
         :param tags: the PEP 425 tags to check the wheel against.
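
A small usage sketch of the new method together with a precomputed priority map (mirroring _wheel_tag_preferences in package_finder.py); the filename and tag order are made up:

    from pip._vendor.packaging.tags import Tag
    from pip._internal.models.wheel import Wheel

    supported = [Tag("cp39", "cp39", "manylinux1_x86_64"), Tag("py3", "none", "any")]
    tag_to_priority = {tag: priority for priority, tag in enumerate(supported)}

    wheel = Wheel("example_pkg-1.0-py3-none-any.whl")
    print(wheel.find_most_preferred_tag(supported, tag_to_priority))  # 1
    # Raises ValueError if none of the wheel's tags appear in the mapping.
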
diff --git a/src/pip/_internal/network/auth.py b/src/pip/_internal/network/auth.py
index cad22a02c..bd54a5cba 100644
--- a/src/pip/_internal/network/auth.py
+++ b/src/pip/_internal/network/auth.py
@@ -37,7 +37,7 @@ except Exception as exc:
 
 
 def get_keyring_auth(url, username):
-    # type: (str, str) -> Optional[AuthInfo]
+    # type: (Optional[str], Optional[str]) -> Optional[AuthInfo]
     """Return the tuple auth for a given url from keyring."""
     global keyring
     if not url or not keyring:
diff --git a/src/pip/_internal/network/session.py b/src/pip/_internal/network/session.py
index 423922a00..4af800f12 100644
--- a/src/pip/_internal/network/session.py
+++ b/src/pip/_internal/network/session.py
@@ -2,8 +2,15 @@
 network request configuration and behavior.
 """
 
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
+# When mypy runs on Windows the call to distro.linux_distribution() is skipped
+# resulting in the failure:
+#
+#     error: unused 'type: ignore' comment
+#
+# If the upstream module adds typing, this comment should be removed. See
+# https://github.com/nir0s/distro/pull/269
+#
+# mypy: warn-unused-ignores=False
 
 import email.utils
 import ipaddress
@@ -15,13 +22,14 @@ import platform
 import sys
 import urllib.parse
 import warnings
-from typing import Any, Iterator, List, Optional, Sequence, Tuple, Union
+from typing import Any, Dict, Iterator, List, Mapping, Optional, Sequence, Tuple, Union
 
 from pip._vendor import requests, urllib3
 from pip._vendor.cachecontrol import CacheControlAdapter
 from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter
-from pip._vendor.requests.models import Response
+from pip._vendor.requests.models import PreparedRequest, Response
 from pip._vendor.requests.structures import CaseInsensitiveDict
+from pip._vendor.urllib3.connectionpool import ConnectionPool
 from pip._vendor.urllib3.exceptions import InsecureRequestWarning
 
 from pip import __version__
@@ -89,6 +97,7 @@ def looks_like_ci():
 
 
 def user_agent():
+    # type: () -> str
     """
     Return a string representing the user agent.
     """
@@ -98,15 +107,14 @@ def user_agent():
         "implementation": {
             "name": platform.python_implementation(),
         },
-    }
+    }  # type: Dict[str, Any]
 
     if data["implementation"]["name"] == 'CPython':
         data["implementation"]["version"] = platform.python_version()
     elif data["implementation"]["name"] == 'PyPy':
-        if sys.pypy_version_info.releaselevel == 'final':
-            pypy_version_info = sys.pypy_version_info[:3]
-        else:
-            pypy_version_info = sys.pypy_version_info
+        pypy_version_info = sys.pypy_version_info  # type: ignore
+        if pypy_version_info.releaselevel == 'final':
+            pypy_version_info = pypy_version_info[:3]
         data["implementation"]["version"] = ".".join(
             [str(x) for x in pypy_version_info]
         )
@@ -119,9 +127,12 @@ def user_agent():
 
     if sys.platform.startswith("linux"):
         from pip._vendor import distro
+
+        # https://github.com/nir0s/distro/pull/269
+        linux_distribution = distro.linux_distribution()  # type: ignore
         distro_infos = dict(filter(
             lambda x: x[1],
-            zip(["name", "version", "id"], distro.linux_distribution()),
+            zip(["name", "version", "id"], linux_distribution),
         ))
         libc = dict(filter(
             lambda x: x[1],
@@ -170,8 +181,16 @@ def user_agent():
 
 class LocalFSAdapter(BaseAdapter):
 
-    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
-             proxies=None):
+    def send(
+        self,
+        request,  # type: PreparedRequest
+        stream=False,  # type: bool
+        timeout=None,  # type: Optional[Union[float, Tuple[float, float]]]
+        verify=True,  # type: Union[bool, str]
+        cert=None,  # type: Optional[Union[str, Tuple[str, str]]]
+        proxies=None,  # type: Optional[Mapping[str, str]]
+    ):
+        # type: (...) -> Response
         pathname = url_to_path(request.url)
 
         resp = Response()
@@ -198,18 +217,33 @@ class LocalFSAdapter(BaseAdapter):
         return resp
 
     def close(self):
+        # type: () -> None
         pass
 
 
 class InsecureHTTPAdapter(HTTPAdapter):
 
-    def cert_verify(self, conn, url, verify, cert):
+    def cert_verify(
+        self,
+        conn,  # type: ConnectionPool
+        url,  # type: str
+        verify,  # type: Union[bool, str]
+        cert,  # type: Optional[Union[str, Tuple[str, str]]]
+    ):
+        # type: (...) -> None
         super().cert_verify(conn=conn, url=url, verify=False, cert=cert)
 
 
 class InsecureCacheControlAdapter(CacheControlAdapter):
 
-    def cert_verify(self, conn, url, verify, cert):
+    def cert_verify(
+        self,
+        conn,  # type: ConnectionPool
+        url,  # type: str
+        verify,  # type: Union[bool, str]
+        cert,  # type: Optional[Union[str, Tuple[str, str]]]
+    ):
+        # type: (...) -> None
         super().cert_verify(conn=conn, url=url, verify=False, cert=cert)
 
 
@@ -261,7 +295,7 @@ class PipSession(requests.Session):
             # Add a small amount of back off between failed requests in
             # order to prevent hammering the service.
             backoff_factor=0.25,
-        )
+        )  # type: ignore
 
         # Our Insecure HTTPAdapter disables HTTPS validation. It does not
         # support caching so we'll use it for all http:// URLs.
@@ -407,6 +441,7 @@ class PipSession(requests.Session):
         return False
 
     def request(self, method, url, *args, **kwargs):
+        # type: (str, str, *Any, **Any) -> Response
         # Allow setting a default timeout on a session
         kwargs.setdefault("timeout", self.timeout)
 
diff --git a/src/pip/_internal/network/utils.py b/src/pip/_internal/network/utils.py
index d29c7c076..6e5cf0d1d 100644
--- a/src/pip/_internal/network/utils.py
+++ b/src/pip/_internal/network/utils.py
@@ -42,12 +42,12 @@ def raise_for_status(resp):
         reason = resp.reason
 
     if 400 <= resp.status_code < 500:
-        http_error_msg = '%s Client Error: %s for url: %s' % (
-            resp.status_code, reason, resp.url)
+        http_error_msg = (
+            f'{resp.status_code} Client Error: {reason} for url: {resp.url}')
 
     elif 500 <= resp.status_code < 600:
-        http_error_msg = '%s Server Error: %s for url: %s' % (
-            resp.status_code, reason, resp.url)
+        http_error_msg = (
+            f'{resp.status_code} Server Error: {reason} for url: {resp.url}')
 
     if http_error_msg:
         raise NetworkConnectionError(http_error_msg, response=resp)
diff --git a/src/pip/_internal/network/xmlrpc.py b/src/pip/_internal/network/xmlrpc.py
index c9f3c5db8..b92b8d9ae 100644
--- a/src/pip/_internal/network/xmlrpc.py
+++ b/src/pip/_internal/network/xmlrpc.py
@@ -3,20 +3,20 @@
 
 import logging
 import urllib.parse
-from typing import Dict
-
-# NOTE: XMLRPC Client is not annotated in typeshed as on 2017-07-17, which is
-#       why we ignore the type on this import
-from pip._vendor.six.moves import xmlrpc_client  # type: ignore
+import xmlrpc.client
+from typing import TYPE_CHECKING, Tuple
 
 from pip._internal.exceptions import NetworkConnectionError
 from pip._internal.network.session import PipSession
 from pip._internal.network.utils import raise_for_status
 
+if TYPE_CHECKING:
+    from xmlrpc.client import _HostType, _Marshallable
+
 logger = logging.getLogger(__name__)
 
 
-class PipXmlrpcTransport(xmlrpc_client.Transport):
+class PipXmlrpcTransport(xmlrpc.client.Transport):
     """Provide a `xmlrpclib.Transport` implementation via a `PipSession`
     object.
     """
@@ -29,7 +29,8 @@ class PipXmlrpcTransport(xmlrpc_client.Transport):
         self._session = session
 
     def request(self, host, handler, request_body, verbose=False):
-        # type: (str, str, Dict[str, str], bool) -> None
+        # type: (_HostType, str, bytes, bool) -> Tuple[_Marshallable, ...]
+        assert isinstance(host, str)
         parts = (self._scheme, host, handler, None, None, None)
         url = urllib.parse.urlunparse(parts)
         try:
diff --git a/src/pip/_internal/operations/check.py b/src/pip/_internal/operations/check.py
index 224633561..5699c0b91 100644
--- a/src/pip/_internal/operations/check.py
+++ b/src/pip/_internal/operations/check.py
@@ -3,7 +3,7 @@
 
 import logging
 from collections import namedtuple
-from typing import Any, Callable, Dict, List, Optional, Set, Tuple
+from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Set, Tuple
 
 from pip._vendor.packaging.utils import canonicalize_name
 from pip._vendor.pkg_resources import RequirementParseError
@@ -12,23 +12,25 @@ from pip._internal.distributions import make_distribution_for_install_requiremen
 from pip._internal.req.req_install import InstallRequirement
 from pip._internal.utils.misc import get_installed_distributions
 
+if TYPE_CHECKING:
+    from pip._vendor.packaging.utils import NormalizedName
+
 logger = logging.getLogger(__name__)
 
 # Shorthands
-PackageSet = Dict[str, 'PackageDetails']
+PackageSet = Dict['NormalizedName', 'PackageDetails']
 Missing = Tuple[str, Any]
 Conflicting = Tuple[str, str, Any]
 
-MissingDict = Dict[str, List[Missing]]
-ConflictingDict = Dict[str, List[Conflicting]]
+MissingDict = Dict['NormalizedName', List[Missing]]
+ConflictingDict = Dict['NormalizedName', List[Conflicting]]
 CheckResult = Tuple[MissingDict, ConflictingDict]
 ConflictDetails = Tuple[PackageSet, CheckResult]
 
 PackageDetails = namedtuple('PackageDetails', ['version', 'requires'])
 
 
-def create_package_set_from_installed(**kwargs):
-    # type: (**Any) -> Tuple[PackageSet, bool]
+def create_package_set_from_installed(**kwargs: Any) -> Tuple["PackageSet", bool]:
     """Converts a list of distributions into a PackageSet.
     """
     # Default to using all packages installed on the system
@@ -59,7 +61,7 @@ def check_package_set(package_set, should_ignore=None):
     missing = {}
     conflicting = {}
 
-    for package_name in package_set:
+    for package_name, package_detail in package_set.items():
         # Info about dependencies of package_name
         missing_deps = set()  # type: Set[Missing]
         conflicting_deps = set()  # type: Set[Conflicting]
@@ -67,8 +69,8 @@ def check_package_set(package_set, should_ignore=None):
         if should_ignore and should_ignore(package_name):
             continue
 
-        for req in package_set[package_name].requires:
-            name = canonicalize_name(req.project_name)  # type: str
+        for req in package_detail.requires:
+            name = canonicalize_name(req.project_name)
 
             # Check if it's missing
             if name not in package_set:
@@ -114,7 +116,7 @@ def check_install_conflicts(to_install):
 
 
 def _simulate_installation_of(to_install, package_set):
-    # type: (List[InstallRequirement], PackageSet) -> Set[str]
+    # type: (List[InstallRequirement], PackageSet) -> Set[NormalizedName]
     """Computes the version of packages after installing to_install.
     """
 
@@ -136,7 +138,7 @@ def _simulate_installation_of(to_install, package_set):
 
 
 def _create_whitelist(would_be_installed, package_set):
-    # type: (Set[str], PackageSet) -> Set[str]
+    # type: (Set[NormalizedName], PackageSet) -> Set[NormalizedName]
     packages_affected = set(would_be_installed)
 
     for package_name in package_set:
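A minimal standalone sketch of the normalization behaviour the NormalizedName-keyed shorthands above rely on; it assumes the packaging distribution (vendored by pip as pip._vendor.packaging) is importable on its own:

    from packaging.utils import canonicalize_name

    # canonicalize_name collapses case, underscore, and dot variants, so
    # differently spelled project names land on the same PackageSet key.
    assert canonicalize_name("Foo_Bar") == canonicalize_name("foo.bar") == "foo-bar"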
diff --git a/src/pip/_internal/operations/prepare.py b/src/pip/_internal/operations/prepare.py
index 72743648a..3d074f9f6 100644
--- a/src/pip/_internal/operations/prepare.py
+++ b/src/pip/_internal/operations/prepare.py
@@ -35,6 +35,7 @@ from pip._internal.network.lazy_wheel import (
 from pip._internal.network.session import PipSession
 from pip._internal.req.req_install import InstallRequirement
 from pip._internal.req.req_tracker import RequirementTracker
+from pip._internal.utils.deprecation import deprecated
 from pip._internal.utils.filesystem import copy2_fixed
 from pip._internal.utils.hashes import Hashes, MissingHashes
 from pip._internal.utils.logging import indent_log
@@ -207,8 +208,23 @@ def unpack_url(
         unpack_vcs_link(link, location)
         return None
 
-    # If it's a url to a local directory
+    # Once out-of-tree builds are no longer supported, we could potentially
+    # replace the condition below with `assert not link.is_existing_dir()`
+    # - unpack_url does not need to be called for in-tree builds.
+    #
+    # As further cleanup, _copy_source_tree and accompanying tests can
+    # be removed.
     if link.is_existing_dir():
+        deprecated(
+            "A future pip version will change local packages to be built "
+            "in-place without first copying to a temporary directory. "
+            "We recommend you use --use-feature=in-tree-build to test "
+            "your packages with this new behavior before it becomes the "
+            "default.\n",
+            replacement=None,
+            gone_in="21.3",
+            issue=7555
+        )
         if os.path.isdir(location):
             rmtree(location)
         _copy_source_tree(link.file_path, location)
@@ -278,6 +294,7 @@ class RequirementPreparer:
         require_hashes,  # type: bool
         use_user_site,  # type: bool
         lazy_wheel,  # type: bool
+        in_tree_build,  # type: bool
     ):
         # type: (...) -> None
         super().__init__()
@@ -306,6 +323,9 @@ class RequirementPreparer:
         # Should wheels be downloaded lazily?
         self.use_lazy_wheel = lazy_wheel
 
+        # Should in-tree builds be used for local paths?
+        self.in_tree_build = in_tree_build
+
         # Memoized downloaded files, as mapping of url: (path, mime type)
         self._downloaded = {}  # type: Dict[str, Tuple[str, str]]
 
@@ -339,6 +359,11 @@ class RequirementPreparer:
             # directory.
             return
         assert req.source_dir is None
+        if req.link.is_existing_dir() and self.in_tree_build:
+            # build local directories in-tree
+            req.source_dir = req.link.file_path
+            return
+
         # We always delete unpacked sdists after pip runs.
         req.ensure_has_source_dir(
             self.build_dir,
@@ -517,11 +542,14 @@ class RequirementPreparer:
 
         self._ensure_link_req_src_dir(req, parallel_builds)
         hashes = self._get_linked_req_hashes(req)
-        if link.url not in self._downloaded:
+
+        if link.is_existing_dir() and self.in_tree_build:
+            local_file = None
+        elif link.url not in self._downloaded:
             try:
                 local_file = unpack_url(
                     link, req.source_dir, self._download,
-                    self.download_dir, hashes,
+                    self.download_dir, hashes
                 )
             except NetworkConnectionError as exc:
                 raise InstallationError(
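A rough, self-contained sketch of the decision the new in_tree_build flag drives; the helper name is illustrative, not pip API. With in-tree builds, a requirement that points at an existing local directory is built from that path directly instead of first being copied into a temporary build directory:

    import os
    import shutil
    import tempfile

    def choose_source_dir(link_path: str, in_tree_build: bool) -> str:
        # Illustrative only: mirrors the branch added to the preparer above.
        if os.path.isdir(link_path) and in_tree_build:
            return link_path  # build in place, no copy
        build_dir = tempfile.mkdtemp(prefix="pip-build-")
        copy_target = os.path.join(build_dir, "src")
        shutil.copytree(link_path, copy_target)  # legacy out-of-tree behaviour
        return copy_target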
diff --git a/src/pip/_internal/req/constructors.py b/src/pip/_internal/req/constructors.py
index 784cd81f6..a659b4d6e 100644
--- a/src/pip/_internal/req/constructors.py
+++ b/src/pip/_internal/req/constructors.py
@@ -77,16 +77,19 @@ def parse_editable(editable_req):
     url_no_extras, extras = _strip_extras(url)
 
     if os.path.isdir(url_no_extras):
-        if not os.path.exists(os.path.join(url_no_extras, 'setup.py')):
+        setup_py = os.path.join(url_no_extras, 'setup.py')
+        setup_cfg = os.path.join(url_no_extras, 'setup.cfg')
+        if not os.path.exists(setup_py) and not os.path.exists(setup_cfg):
             msg = (
-                'File "setup.py" not found. Directory cannot be installed '
-                'in editable mode: {}'.format(os.path.abspath(url_no_extras))
+                'File "setup.py" or "setup.cfg" not found. Directory cannot be '
+                'installed in editable mode: {}'
+                .format(os.path.abspath(url_no_extras))
             )
             pyproject_path = make_pyproject_path(url_no_extras)
             if os.path.isfile(pyproject_path):
                 msg += (
                     '\n(A "pyproject.toml" file was found, but editable '
-                    'mode currently requires a setup.py based build.)'
+                    'mode currently requires a setuptools-based build.)'
                 )
             raise InstallationError(msg)
 
@@ -140,7 +143,7 @@ def deduce_helpful_msg(req):
         msg = " The path does exist. "
         # Try to parse and check if it is a requirements file.
         try:
-            with open(req, 'r') as fp:
+            with open(req) as fp:
                 # parse first line only
                 next(parse_requirements(fp.read()))
                 msg += (
@@ -179,7 +182,7 @@ def parse_req_from_editable(editable_req):
 
     if name is not None:
         try:
-            req = Requirement(name)
+            req = Requirement(name)  # type: Optional[Requirement]
         except InvalidRequirement:
             raise InstallationError(f"Invalid requirement: '{name}'")
     else:
@@ -256,8 +259,8 @@ def _get_url_from_path(path, name):
         if is_installable_dir(path):
             return path_to_url(path)
         raise InstallationError(
-            "Directory {name!r} is not installable. Neither 'setup.py' "
-            "nor 'pyproject.toml' found.".format(**locals())
+            f"Directory {name!r} is not installable. Neither 'setup.py' "
+            "nor 'pyproject.toml' found."
         )
     if not is_archive_file(path):
         return None
@@ -314,7 +317,7 @@ def parse_req_from_line(name, line_source):
         # wheel file
         if link.is_wheel:
             wheel = Wheel(link.filename)  # can raise InvalidWheelFilename
-            req_as_string = "{wheel.name}=={wheel.version}".format(**locals())
+            req_as_string = f"{wheel.name}=={wheel.version}"
         else:
             # set the req to the egg fragment.  when it's not there, this
             # will become an 'unnamed' requirement
@@ -332,7 +335,7 @@ def parse_req_from_line(name, line_source):
             return text
         return f'{text} (from {line_source})'
 
-    if req_as_string is not None:
+    def _parse_req_string(req_as_string: str) -> Requirement:
         try:
             req = Requirement(req_as_string)
         except InvalidRequirement:
@@ -360,6 +363,10 @@ def parse_req_from_line(name, line_source):
                 if spec_str.endswith(']'):
                     msg = f"Extras after version '{spec_str}'."
                     raise InstallationError(msg)
+        return req
+
+    if req_as_string is not None:
+        req = _parse_req_string(req_as_string)  # type: Optional[Requirement]
     else:
         req = None
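A small standalone sketch of the relaxed editable check above (the function name is illustrative, not pip API): a local directory now qualifies if it ships either a setup.py or a setup.cfg:

    import os

    def looks_editable_installable(directory: str) -> bool:
        # Either file is enough for a setuptools-based editable install.
        return any(
            os.path.exists(os.path.join(directory, name))
            for name in ("setup.py", "setup.cfg")
        )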
 
diff --git a/src/pip/_internal/req/req_file.py b/src/pip/_internal/req/req_file.py
index 336cd137e..f6bdfd19b 100644
--- a/src/pip/_internal/req/req_file.py
+++ b/src/pip/_internal/req/req_file.py
@@ -17,7 +17,6 @@ from typing import (
     List,
     NoReturn,
     Optional,
-    Text,
     Tuple,
 )
 
@@ -34,9 +33,9 @@ if TYPE_CHECKING:
 
 __all__ = ['parse_requirements']
 
-ReqFileLines = Iterator[Tuple[int, Text]]
+ReqFileLines = Iterator[Tuple[int, str]]
 
-LineParser = Callable[[Text], Tuple[str, Values]]
+LineParser = Callable[[str], Tuple[str, Values]]
 
 SCHEME_RE = re.compile(r'^(http|https|file):', re.I)
 COMMENT_RE = re.compile(r'(^|\s+)#.*$')
diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py
index f8643c21a..7f4e974cc 100644
--- a/src/pip/_internal/req/req_install.py
+++ b/src/pip/_internal/req/req_install.py
@@ -349,7 +349,7 @@ class InstallRequirement:
 
         # When parallel builds are enabled, add a UUID to the build directory
         # name so multiple builds do not interfere with each other.
-        dir_name = canonicalize_name(self.name)
+        dir_name = canonicalize_name(self.name)  # type: str
         if parallel_builds:
             dir_name = f"{dir_name}_{uuid.uuid4().hex}"
 
@@ -419,8 +419,16 @@ class InstallRequirement:
         if not existing_dist:
             return
 
-        existing_version = existing_dist.parsed_version
-        if not self.req.specifier.contains(existing_version, prereleases=True):
+        # pkg_resources may contain a different copy of packaging.version than
+        # pip if the downstream distributor does a poor job debundling pip.
+        # We avoid existing_dist.parsed_version and let SpecifierSet.contains
+        # parse the version instead.
+        existing_version = existing_dist.version
+        version_compatible = (
+            existing_version is not None and
+            self.req.specifier.contains(existing_version, prereleases=True)
+        )
+        if not version_compatible:
             self.satisfied_by = None
             if use_user_site:
                 if dist_in_usersite(existing_dist):
@@ -651,8 +659,7 @@ class InstallRequirement:
         def _clean_zip_name(name, prefix):
             # type: (str, str) -> str
             assert name.startswith(prefix + os.path.sep), (
-                "name {name!r} doesn't start with prefix {prefix!r}"
-                .format(**locals())
+                f"name {name!r} doesn't start with prefix {prefix!r}"
             )
             name = name[len(prefix) + 1:]
             name = name.replace(os.path.sep, '/')
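A minimal sketch of the comparison strategy used in the req_install.py hunk above: the raw version string is handed to SpecifierSet.contains, which parses it with its own copy of packaging, so the result no longer depends on the Version class pkg_resources happens to ship. Assumes the packaging distribution is installed:

    from packaging.specifiers import SpecifierSet

    spec = SpecifierSet(">=1.0,<2.0")
    # contains() accepts a plain string and parses it itself.
    assert spec.contains("1.4.2", prereleases=True)
    assert not spec.contains("2.0.0", prereleases=True)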
diff --git a/src/pip/_internal/req/req_set.py b/src/pip/_internal/req/req_set.py
index 7ab236dc1..59c584355 100644
--- a/src/pip/_internal/req/req_set.py
+++ b/src/pip/_internal/req/req_set.py
@@ -28,7 +28,7 @@ class RequirementSet:
         # type: () -> str
         requirements = sorted(
             (req for req in self.requirements.values() if not req.comes_from),
-            key=lambda req: canonicalize_name(req.name),
+            key=lambda req: canonicalize_name(req.name or ""),
         )
         return ' '.join(str(req.req) for req in requirements)
 
@@ -36,7 +36,7 @@ class RequirementSet:
         # type: () -> str
         requirements = sorted(
             self.requirements.values(),
-            key=lambda req: canonicalize_name(req.name),
+            key=lambda req: canonicalize_name(req.name or ""),
         )
 
         format_string = '<{classname} object; {count} requirement(s): {reqs}>'
@@ -122,6 +122,8 @@ class RequirementSet:
             existing_req and
             not existing_req.constraint and
             existing_req.extras == install_req.extras and
+            existing_req.req and
+            install_req.req and
             existing_req.req.specifier != install_req.req.specifier
         )
         if has_conflicting_requirement:
@@ -189,7 +191,7 @@ class RequirementSet:
         if project_name in self.requirements:
             return self.requirements[project_name]
 
-        raise KeyError("No project with the name {name!r}".format(**locals()))
+        raise KeyError(f"No project with the name {name!r}")
 
     @property
     def all_requirements(self):
diff --git a/src/pip/_internal/req/req_uninstall.py b/src/pip/_internal/req/req_uninstall.py
index 519b79166..b72234175 100644
--- a/src/pip/_internal/req/req_uninstall.py
+++ b/src/pip/_internal/req/req_uninstall.py
@@ -11,7 +11,7 @@ from pip._vendor import pkg_resources
 from pip._vendor.pkg_resources import Distribution
 
 from pip._internal.exceptions import UninstallationError
-from pip._internal.locations import bin_py, bin_user
+from pip._internal.locations import get_bin_prefix, get_bin_user
 from pip._internal.utils.compat import WINDOWS
 from pip._internal.utils.logging import indent_log
 from pip._internal.utils.misc import (
@@ -36,9 +36,9 @@ def _script_names(dist, script_name, is_gui):
     Returns the list of file names
     """
     if dist_in_usersite(dist):
-        bin_dir = bin_user
+        bin_dir = get_bin_user()
     else:
-        bin_dir = bin_py
+        bin_dir = get_bin_prefix()
     exe_name = os.path.join(bin_dir, script_name)
     paths_to_remove = [exe_name]
     if WINDOWS:
@@ -529,7 +529,7 @@ class UninstallPathSet:
 
         elif develop_egg_link:
             # develop egg
-            with open(develop_egg_link, 'r') as fh:
+            with open(develop_egg_link) as fh:
                 link_pointer = os.path.normcase(fh.readline().strip())
             assert (link_pointer == dist.location), (
                 'Egg-link {} does not match installed location of {} '
@@ -551,9 +551,9 @@ class UninstallPathSet:
         if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'):
             for script in dist.metadata_listdir('scripts'):
                 if dist_in_usersite(dist):
-                    bin_dir = bin_user
+                    bin_dir = get_bin_user()
                 else:
-                    bin_dir = bin_py
+                    bin_dir = get_bin_prefix()
                 paths_to_remove.add(os.path.join(bin_dir, script))
                 if WINDOWS:
                     paths_to_remove.add(os.path.join(bin_dir, script) + '.bat')
diff --git a/src/pip/_internal/resolution/base.py b/src/pip/_internal/resolution/base.py
index 7526bfe84..1be0cb279 100644
--- a/src/pip/_internal/resolution/base.py
+++ b/src/pip/_internal/resolution/base.py
@@ -3,9 +3,7 @@ from typing import Callable, List
 from pip._internal.req.req_install import InstallRequirement
 from pip._internal.req.req_set import RequirementSet
 
-InstallRequirementProvider = Callable[
-    [str, InstallRequirement], InstallRequirement
-]
+InstallRequirementProvider = Callable[[str, InstallRequirement], InstallRequirement]
 
 
 class BaseResolver:
diff --git a/src/pip/_internal/resolution/legacy/resolver.py b/src/pip/_internal/resolution/legacy/resolver.py
index d5ea3a0ac..17de7f09a 100644
--- a/src/pip/_internal/resolution/legacy/resolver.py
+++ b/src/pip/_internal/resolution/legacy/resolver.py
@@ -12,13 +12,12 @@ for sub-dependencies
 
 # The following comment should be removed at some point in the future.
 # mypy: strict-optional=False
-# mypy: disallow-untyped-defs=False
 
 import logging
 import sys
 from collections import defaultdict
 from itertools import chain
-from typing import DefaultDict, List, Optional, Set, Tuple
+from typing import DefaultDict, Iterable, List, Optional, Set, Tuple
 
 from pip._vendor.packaging import specifiers
 from pip._vendor.pkg_resources import Distribution
@@ -71,31 +70,32 @@ def _check_dist_requires_python(
     requires_python = get_requires_python(dist)
     try:
         is_compatible = check_requires_python(
-            requires_python, version_info=version_info,
+            requires_python, version_info=version_info
         )
     except specifiers.InvalidSpecifier as exc:
         logger.warning(
-            "Package %r has an invalid Requires-Python: %s",
-            dist.project_name, exc,
+            "Package %r has an invalid Requires-Python: %s", dist.project_name, exc
         )
         return
 
     if is_compatible:
         return
 
-    version = '.'.join(map(str, version_info))
+    version = ".".join(map(str, version_info))
     if ignore_requires_python:
         logger.debug(
-            'Ignoring failed Requires-Python check for package %r: '
-            '%s not in %r',
-            dist.project_name, version, requires_python,
+            "Ignoring failed Requires-Python check for package %r: %s not in %r",
+            dist.project_name,
+            version,
+            requires_python,
         )
         return
 
     raise UnsupportedPythonVersion(
-        'Package {!r} requires a different Python: {} not in {!r}'.format(
-            dist.project_name, version, requires_python,
-        ))
+        "Package {!r} requires a different Python: {} not in {!r}".format(
+            dist.project_name, version, requires_python
+        )
+    )
 
 
 class Resolver(BaseResolver):
@@ -142,8 +142,9 @@ class Resolver(BaseResolver):
         self.use_user_site = use_user_site
         self._make_install_req = make_install_req
 
-        self._discovered_dependencies = \
-            defaultdict(list)  # type: DiscoveredDependencies
+        self._discovered_dependencies = defaultdict(
+            list
+        )  # type: DiscoveredDependencies
 
     def resolve(self, root_reqs, check_supported_wheels):
         # type: (List[InstallRequirement], bool) -> RequirementSet
@@ -157,9 +158,7 @@ class Resolver(BaseResolver):
         possible to move the preparation to become a step separated from
         dependency resolution.
         """
-        requirement_set = RequirementSet(
-            check_supported_wheels=check_supported_wheels
-        )
+        requirement_set = RequirementSet(check_supported_wheels=check_supported_wheels)
         for req in root_reqs:
             if req.constraint:
                 check_invalid_constraint_type(req)
@@ -236,8 +235,8 @@ class Resolver(BaseResolver):
 
         if not self._is_upgrade_allowed(req_to_install):
             if self.upgrade_strategy == "only-if-needed":
-                return 'already satisfied, skipping upgrade'
-            return 'already satisfied'
+                return "already satisfied, skipping upgrade"
+            return "already satisfied"
 
         # Check for the possibility of an upgrade.  For link-based
         # requirements we have to pull the tree down and inspect to assess
@@ -247,7 +246,7 @@ class Resolver(BaseResolver):
                 self.finder.find_requirement(req_to_install, upgrade=True)
             except BestVersionAlreadyInstalled:
                 # Then the best version is installed.
-                return 'already up-to-date'
+                return "already up-to-date"
             except DistributionNotFound:
                 # No distribution found, so we squash the error.  It will
                 # be raised later when we re-try later to do the install.
@@ -267,14 +266,14 @@ class Resolver(BaseResolver):
         # Log a warning per PEP 592 if necessary before returning.
         link = best_candidate.link
         if link.is_yanked:
-            reason = link.yanked_reason or ''
+            reason = link.yanked_reason or ""
             msg = (
                 # Mark this as a unicode string to prevent
                 # "UnicodeEncodeError: 'ascii' codec can't encode character"
                 # in Python 2 when the reason contains non-ascii characters.
-                'The candidate selected for download or install is a '
-                'yanked version: {candidate}\n'
-                'Reason for being yanked: {reason}'
+                "The candidate selected for download or install is a "
+                "yanked version: {candidate}\n"
+                "Reason for being yanked: {reason}"
             ).format(candidate=best_candidate, reason=reason)
             logger.warning(msg)
 
@@ -305,7 +304,7 @@ class Resolver(BaseResolver):
             supported_tags=get_supported(),
         )
         if cache_entry is not None:
-            logger.debug('Using cached wheel link: %s', cache_entry.link)
+            logger.debug("Using cached wheel link: %s", cache_entry.link)
             if req.link is req.original_link and cache_entry.persistent:
                 req.original_link_is_in_wheel_cache = True
             req.link = cache_entry.link
@@ -324,9 +323,7 @@ class Resolver(BaseResolver):
         skip_reason = self._check_skip_installed(req)
 
         if req.satisfied_by:
-            return self.preparer.prepare_installed_requirement(
-                req, skip_reason
-            )
+            return self.preparer.prepare_installed_requirement(req, skip_reason)
 
         # We eagerly populate the link, since that's our "legacy" behavior.
         self._populate_link(req)
@@ -345,17 +342,17 @@ class Resolver(BaseResolver):
 
         if req.satisfied_by:
             should_modify = (
-                self.upgrade_strategy != "to-satisfy-only" or
-                self.force_reinstall or
-                self.ignore_installed or
-                req.link.scheme == 'file'
+                self.upgrade_strategy != "to-satisfy-only"
+                or self.force_reinstall
+                or self.ignore_installed
+                or req.link.scheme == "file"
             )
             if should_modify:
                 self._set_req_to_reinstall(req)
             else:
                 logger.info(
-                    'Requirement already satisfied (use --upgrade to upgrade):'
-                    ' %s', req,
+                    "Requirement already satisfied (use --upgrade to upgrade): %s",
+                    req,
                 )
         return dist
 
@@ -382,13 +379,15 @@ class Resolver(BaseResolver):
         # This will raise UnsupportedPythonVersion if the given Python
         # version isn't compatible with the distribution's Requires-Python.
         _check_dist_requires_python(
-            dist, version_info=self._py_version_info,
+            dist,
+            version_info=self._py_version_info,
             ignore_requires_python=self.ignore_requires_python,
         )
 
         more_reqs = []  # type: List[InstallRequirement]
 
         def add_req(subreq, extras_requested):
+            # type: (Distribution, Iterable[str]) -> None
             sub_install_req = self._make_install_req(
                 str(subreq),
                 req_to_install,
@@ -400,9 +399,7 @@ class Resolver(BaseResolver):
                 extras_requested=extras_requested,
             )
             if parent_req_name and add_to_parent:
-                self._discovered_dependencies[parent_req_name].append(
-                    add_to_parent
-                )
+                self._discovered_dependencies[parent_req_name].append(add_to_parent)
             more_reqs.extend(to_scan_again)
 
         with indent_log():
@@ -413,24 +410,19 @@ class Resolver(BaseResolver):
                 # 'unnamed' requirements can only come from being directly
                 # provided by the user.
                 assert req_to_install.user_supplied
-                requirement_set.add_requirement(
-                    req_to_install, parent_req_name=None,
-                )
+                requirement_set.add_requirement(req_to_install, parent_req_name=None)
 
             if not self.ignore_dependencies:
                 if req_to_install.extras:
                     logger.debug(
                         "Installing extra requirements: %r",
-                        ','.join(req_to_install.extras),
+                        ",".join(req_to_install.extras),
                     )
                 missing_requested = sorted(
                     set(req_to_install.extras) - set(dist.extras)
                 )
                 for missing in missing_requested:
-                    logger.warning(
-                        "%s does not provide the extra '%s'",
-                        dist, missing
-                    )
+                    logger.warning("%s does not provide the extra '%s'", dist, missing)
 
                 available_requested = sorted(
                     set(dist.extras) & set(req_to_install.extras)
@@ -455,6 +447,7 @@ class Resolver(BaseResolver):
         ordered_reqs = set()  # type: Set[InstallRequirement]
 
         def schedule(req):
+            # type: (InstallRequirement) -> None
             if req.satisfied_by or req in ordered_reqs:
                 return
             if req.constraint:
diff --git a/src/pip/_internal/resolution/resolvelib/base.py b/src/pip/_internal/resolution/resolvelib/base.py
index 81fee9b9e..0295b0ed8 100644
--- a/src/pip/_internal/resolution/resolvelib/base.py
+++ b/src/pip/_internal/resolution/resolvelib/base.py
@@ -1,14 +1,15 @@
-from typing import FrozenSet, Iterable, Optional, Tuple
+from typing import FrozenSet, Iterable, Optional, Tuple, Union
 
 from pip._vendor.packaging.specifiers import SpecifierSet
-from pip._vendor.packaging.utils import canonicalize_name
-from pip._vendor.packaging.version import _BaseVersion
+from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
+from pip._vendor.packaging.version import LegacyVersion, Version
 
 from pip._internal.models.link import Link
 from pip._internal.req.req_install import InstallRequirement
 from pip._internal.utils.hashes import Hashes
 
 CandidateLookup = Tuple[Optional["Candidate"], Optional[InstallRequirement]]
+CandidateVersion = Union[LegacyVersion, Version]
 
 
 def format_name(project, extras):
@@ -62,7 +63,7 @@ class Constraint:
 class Requirement:
     @property
     def project_name(self):
-        # type: () -> str
+        # type: () -> NormalizedName
         """The "project name" of a requirement.
 
         This is different from ``name`` if this requirement contains extras,
@@ -97,7 +98,7 @@ class Requirement:
 class Candidate:
     @property
     def project_name(self):
-        # type: () -> str
+        # type: () -> NormalizedName
         """The "project name" of the candidate.
 
         This is different from ``name`` if this candidate contains extras,
@@ -118,7 +119,7 @@ class Candidate:
 
     @property
     def version(self):
-        # type: () -> _BaseVersion
+        # type: () -> CandidateVersion
         raise NotImplementedError("Override in subclass")
 
     @property
diff --git a/src/pip/_internal/resolution/resolvelib/candidates.py b/src/pip/_internal/resolution/resolvelib/candidates.py
index fbd0a613d..184884cbd 100644
--- a/src/pip/_internal/resolution/resolvelib/candidates.py
+++ b/src/pip/_internal/resolution/resolvelib/candidates.py
@@ -1,10 +1,11 @@
 import logging
 import sys
-from typing import TYPE_CHECKING, Any, FrozenSet, Iterable, Optional, Tuple, Union
+from typing import TYPE_CHECKING, Any, FrozenSet, Iterable, Optional, Tuple, Union, cast
 
 from pip._vendor.packaging.specifiers import InvalidSpecifier, SpecifierSet
-from pip._vendor.packaging.utils import canonicalize_name
-from pip._vendor.packaging.version import Version, _BaseVersion
+from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
+from pip._vendor.packaging.version import Version
+from pip._vendor.packaging.version import parse as parse_version
 from pip._vendor.pkg_resources import Distribution
 
 from pip._internal.exceptions import HashError, MetadataInconsistent
@@ -18,7 +19,7 @@ from pip._internal.req.req_install import InstallRequirement
 from pip._internal.utils.misc import dist_is_editable, normalize_version_info
 from pip._internal.utils.packaging import get_requires_python
 
-from .base import Candidate, Requirement, format_name
+from .base import Candidate, CandidateVersion, Requirement, format_name
 
 if TYPE_CHECKING:
     from .factory import Factory
@@ -49,7 +50,7 @@ def make_install_req_from_link(link, template):
         options=dict(
             install_options=template.install_options,
             global_options=template.global_options,
-            hashes=template.hash_options
+            hashes=template.hash_options,
         ),
     )
     ireq.original_link = template.original_link
@@ -70,7 +71,7 @@ def make_install_req_from_editable(link, template):
         options=dict(
             install_options=template.install_options,
             global_options=template.global_options,
-            hashes=template.hash_options
+            hashes=template.hash_options,
         ),
     )
 
@@ -94,7 +95,7 @@ def make_install_req_from_dist(dist, template):
         options=dict(
             install_options=template.install_options,
             global_options=template.global_options,
-            hashes=template.hash_options
+            hashes=template.hash_options,
         ),
     )
     ireq.satisfied_by = dist
@@ -116,16 +117,17 @@ class _InstallRequirementBackedCandidate(Candidate):
         ``link`` would point to the wheel cache, while this points to the
         found remote link (e.g. from pypi.org).
     """
+
     is_installed = False
 
     def __init__(
         self,
-        link,          # type: Link
-        source_link,   # type: Link
-        ireq,          # type: InstallRequirement
-        factory,       # type: Factory
-        name=None,     # type: Optional[str]
-        version=None,  # type: Optional[_BaseVersion]
+        link,  # type: Link
+        source_link,  # type: Link
+        ireq,  # type: InstallRequirement
+        factory,  # type: Factory
+        name=None,  # type: Optional[NormalizedName]
+        version=None,  # type: Optional[CandidateVersion]
     ):
         # type: (...) -> None
         self._link = link
@@ -164,7 +166,7 @@ class _InstallRequirementBackedCandidate(Candidate):
 
     @property
     def project_name(self):
-        # type: () -> str
+        # type: () -> NormalizedName
         """The normalised name of the project the candidate refers to"""
         if self._name is None:
             self._name = canonicalize_name(self.dist.project_name)
@@ -177,9 +179,9 @@ class _InstallRequirementBackedCandidate(Candidate):
 
     @property
     def version(self):
-        # type: () -> _BaseVersion
+        # type: () -> CandidateVersion
         if self._version is None:
-            self._version = self.dist.parsed_version
+            self._version = parse_version(self.dist.version)
         return self._version
 
     def format_for_error(self):
@@ -187,7 +189,7 @@ class _InstallRequirementBackedCandidate(Candidate):
         return "{} {} (from {})".format(
             self.name,
             self.version,
-            self._link.file_path if self._link.is_file else self._link
+            self._link.file_path if self._link.is_file else self._link,
         )
 
     def _prepare_distribution(self):
@@ -205,7 +207,8 @@ class _InstallRequirementBackedCandidate(Candidate):
                 self._name,
                 dist.project_name,
             )
-        if self._version is not None and self._version != dist.parsed_version:
+        parsed_version = parse_version(dist.version)
+        if self._version is not None and self._version != parsed_version:
             raise MetadataInconsistent(
                 self._ireq,
                 "version",
@@ -256,11 +259,11 @@ class LinkCandidate(_InstallRequirementBackedCandidate):
 
     def __init__(
         self,
-        link,          # type: Link
-        template,        # type: InstallRequirement
-        factory,       # type: Factory
-        name=None,     # type: Optional[str]
-        version=None,  # type: Optional[_BaseVersion]
+        link,  # type: Link
+        template,  # type: InstallRequirement
+        factory,  # type: Factory
+        name=None,  # type: Optional[NormalizedName]
+        version=None,  # type: Optional[CandidateVersion]
     ):
         # type: (...) -> None
         source_link = link
@@ -273,21 +276,19 @@ class LinkCandidate(_InstallRequirementBackedCandidate):
         if ireq.link.is_wheel and not ireq.link.is_file:
             wheel = Wheel(ireq.link.filename)
             wheel_name = canonicalize_name(wheel.name)
-            assert name == wheel_name, (
-                f"{name!r} != {wheel_name!r} for wheel"
-            )
+            assert name == wheel_name, f"{name!r} != {wheel_name!r} for wheel"
             # Version may not be present for PEP 508 direct URLs
             if version is not None:
                 wheel_version = Version(wheel.version)
-                assert version == wheel_version, (
-                    "{!r} != {!r} for wheel {}".format(
-                        version, wheel_version, name
-                    )
+                assert version == wheel_version, "{!r} != {!r} for wheel {}".format(
+                    version, wheel_version, name
                 )
 
-        if (cache_entry is not None and
-                cache_entry.persistent and
-                template.link is template.original_link):
+        if (
+            cache_entry is not None
+            and cache_entry.persistent
+            and template.link is template.original_link
+        ):
             ireq.original_link_is_in_wheel_cache = True
 
         super().__init__(
@@ -302,7 +303,7 @@ class LinkCandidate(_InstallRequirementBackedCandidate):
     def _prepare_distribution(self):
         # type: () -> Distribution
         return self._factory.preparer.prepare_linked_requirement(
-            self._ireq, parallel_builds=True,
+            self._ireq, parallel_builds=True
         )
 
 
@@ -311,11 +312,11 @@ class EditableCandidate(_InstallRequirementBackedCandidate):
 
     def __init__(
         self,
-        link,          # type: Link
-        template,        # type: InstallRequirement
-        factory,       # type: Factory
-        name=None,     # type: Optional[str]
-        version=None,  # type: Optional[_BaseVersion]
+        link,  # type: Link
+        template,  # type: InstallRequirement
+        factory,  # type: Factory
+        name=None,  # type: Optional[NormalizedName]
+        version=None,  # type: Optional[CandidateVersion]
     ):
         # type: (...) -> None
         super().__init__(
@@ -377,7 +378,7 @@ class AlreadyInstalledCandidate(Candidate):
 
     @property
     def project_name(self):
-        # type: () -> str
+        # type: () -> NormalizedName
         return canonicalize_name(self.dist.project_name)
 
     @property
@@ -387,8 +388,8 @@ class AlreadyInstalledCandidate(Candidate):
 
     @property
     def version(self):
-        # type: () -> _BaseVersion
-        return self.dist.parsed_version
+        # type: () -> CandidateVersion
+        return parse_version(self.dist.version)
 
     @property
     def is_editable(self):
@@ -435,6 +436,7 @@ class ExtrasCandidate(Candidate):
     version 2.0. Having those candidates depend on foo=1.0 and foo=2.0
     respectively forces the resolver to recognise that this is a conflict.
     """
+
     def __init__(
         self,
         base,  # type: BaseCandidate
@@ -469,7 +471,7 @@ class ExtrasCandidate(Candidate):
 
     @property
     def project_name(self):
-        # type: () -> str
+        # type: () -> NormalizedName
         return self.base.project_name
 
     @property
@@ -480,14 +482,13 @@ class ExtrasCandidate(Candidate):
 
     @property
     def version(self):
-        # type: () -> _BaseVersion
+        # type: () -> CandidateVersion
         return self.base.version
 
     def format_for_error(self):
         # type: () -> str
         return "{} [{}]".format(
-            self.base.format_for_error(),
-            ", ".join(sorted(self.extras))
+            self.base.format_for_error(), ", ".join(sorted(self.extras))
         )
 
     @property
@@ -524,12 +525,12 @@ class ExtrasCandidate(Candidate):
                 "%s %s does not provide the extra '%s'",
                 self.base.name,
                 self.version,
-                extra
+                extra,
             )
 
         for r in self.base.dist.requires(valid_extras):
             requirement = factory.make_requirement_from_spec(
-                str(r), self.base._ireq, valid_extras,
+                str(r), self.base._ireq, valid_extras
             )
             if requirement:
                 yield requirement
@@ -564,9 +565,9 @@ class RequiresPythonCandidate(Candidate):
 
     @property
     def project_name(self):
-        # type: () -> str
+        # type: () -> NormalizedName
         # Avoid conflicting with the PyPI package "Python".
-        return ""
+        return cast(NormalizedName, "")
 
     @property
     def name(self):
@@ -575,7 +576,7 @@ class RequiresPythonCandidate(Candidate):
 
     @property
     def version(self):
-        # type: () -> _BaseVersion
+        # type: () -> CandidateVersion
         return self._version
 
     def format_for_error(self):
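A short sketch of the CandidateVersion values the candidate classes above now produce via parse_version: packaging's parse() returns a Version for PEP 440 strings (and, in the packaging release vendored at the time, a LegacyVersion for anything else), which is why the alias in base.py is a Union. Assumes the packaging distribution is installed:

    from packaging.version import parse

    v = parse("21.1.dev0")
    # A PEP 440 string parses to a Version with structured components.
    print(type(v).__name__, v.release, v.is_prerelease)  # Version (21, 1) True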
diff --git a/src/pip/_internal/resolution/resolvelib/factory.py b/src/pip/_internal/resolution/resolvelib/factory.py
index 259d76af6..dd747198f 100644
--- a/src/pip/_internal/resolution/resolvelib/factory.py
+++ b/src/pip/_internal/resolution/resolvelib/factory.py
@@ -1,6 +1,7 @@
 import functools
 import logging
 from typing import (
+    TYPE_CHECKING,
     Dict,
     FrozenSet,
     Iterable,
@@ -11,11 +12,11 @@ from typing import (
     Set,
     Tuple,
     TypeVar,
+    cast,
 )
 
 from pip._vendor.packaging.specifiers import SpecifierSet
-from pip._vendor.packaging.utils import canonicalize_name
-from pip._vendor.packaging.version import _BaseVersion
+from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
 from pip._vendor.pkg_resources import Distribution
 from pip._vendor.resolvelib import ResolutionImpossible
 
@@ -43,7 +44,7 @@ from pip._internal.utils.misc import (
 )
 from pip._internal.utils.virtualenv import running_under_virtualenv
 
-from .base import Candidate, Constraint, Requirement
+from .base import Candidate, CandidateVersion, Constraint, Requirement
 from .candidates import (
     AlreadyInstalledCandidate,
     BaseCandidate,
@@ -60,6 +61,14 @@ from .requirements import (
     UnsatisfiableRequirement,
 )
 
+if TYPE_CHECKING:
+    from typing import Protocol
+
+    class ConflictCause(Protocol):
+        requirement: RequiresPythonRequirement
+        parent: Candidate
+
+
 logger = logging.getLogger(__name__)
 
 C = TypeVar("C")
@@ -92,8 +101,9 @@ class Factory:
         self._build_failures = {}  # type: Cache[InstallationError]
         self._link_candidate_cache = {}  # type: Cache[LinkCandidate]
         self._editable_candidate_cache = {}  # type: Cache[EditableCandidate]
-        self._installed_candidate_cache = {
-        }  # type: Dict[str, AlreadyInstalledCandidate]
+        self._installed_candidate_cache = (
+            {}
+        )  # type: Dict[str, AlreadyInstalledCandidate]
 
         if not ignore_installed:
             self._installed_dists = {
@@ -129,8 +139,8 @@ class Factory:
         link,  # type: Link
         extras,  # type: FrozenSet[str]
         template,  # type: InstallRequirement
-        name,  # type: Optional[str]
-        version,  # type: Optional[_BaseVersion]
+        name,  # type: Optional[NormalizedName]
+        version,  # type: Optional[CandidateVersion]
     ):
         # type: (...) -> Optional[Candidate]
         # TODO: Check already installed candidate, and use it if the link and
@@ -145,8 +155,11 @@ class Factory:
             if link not in self._editable_candidate_cache:
                 try:
                     self._editable_candidate_cache[link] = EditableCandidate(
-                        link, template, factory=self,
-                        name=name, version=version,
+                        link,
+                        template,
+                        factory=self,
+                        name=name,
+                        version=version,
                     )
                 except (InstallationSubprocessError, MetadataInconsistent) as e:
                     logger.warning("Discarding %s. %s", link, e)
@@ -157,8 +170,11 @@ class Factory:
             if link not in self._link_candidate_cache:
                 try:
                     self._link_candidate_cache[link] = LinkCandidate(
-                        link, template, factory=self,
-                        name=name, version=version,
+                        link,
+                        template,
+                        factory=self,
+                        name=name,
+                        version=version,
                     )
                 except (InstallationSubprocessError, MetadataInconsistent) as e:
                     logger.warning("Discarding %s. %s", link, e)
@@ -186,10 +202,12 @@ class Factory:
         # all of them.
         # Hopefully the Project model can correct this mismatch in the future.
         template = ireqs[0]
+        assert template.req, "Candidates found on index must be PEP 508"
         name = canonicalize_name(template.req.name)
 
         extras = frozenset()  # type: FrozenSet[str]
         for ireq in ireqs:
+            assert ireq.req, "Candidates found on index must be PEP 508"
             specifier &= ireq.req.specifier
             hashes &= ireq.hashes(trust_internet=False)
             extras |= frozenset(ireq.extras)
@@ -268,7 +286,8 @@ class Factory:
             )
 
         return (
-            c for c in explicit_candidates
+            c
+            for c in explicit_candidates
             if constraint.is_satisfied_by(c)
             and all(req.is_satisfied_by(c) for req in requirements)
         )
@@ -278,7 +297,8 @@ class Factory:
         if not ireq.match_markers(requested_extras):
             logger.info(
                 "Ignoring %s: markers '%s' don't match your environment",
-                ireq.name, ireq.markers,
+                ireq.name,
+                ireq.markers,
             )
             return None
         if not ireq.link:
@@ -350,7 +370,7 @@ class Factory:
     def get_dist_to_uninstall(self, candidate):
         # type: (Candidate) -> Optional[Distribution]
         # TODO: Are there more cases this needs to return True? Editable?
-        dist = self._installed_dists.get(candidate.name)
+        dist = self._installed_dists.get(candidate.project_name)
         if dist is None:  # Not installed, no uninstallation required.
             return None
 
@@ -372,41 +392,75 @@ class Factory:
             raise InstallationError(
                 "Will not install to the user site because it will "
                 "lack sys.path precedence to {} in {}".format(
-                    dist.project_name, dist.location,
+                    dist.project_name,
+                    dist.location,
                 )
             )
         return None
 
-    def _report_requires_python_error(
-        self,
-        requirement,  # type: RequiresPythonRequirement
-        template,  # type: Candidate
-    ):
-        # type: (...) -> UnsupportedPythonVersion
-        message_format = (
-            "Package {package!r} requires a different Python: "
-            "{version} not in {specifier!r}"
-        )
-        message = message_format.format(
-            package=template.name,
-            version=self._python_candidate.version,
-            specifier=str(requirement.specifier),
-        )
+    def _report_requires_python_error(self, causes):
+        # type: (Sequence[ConflictCause]) -> UnsupportedPythonVersion
+        assert causes, "Requires-Python error reported with no cause"
+
+        version = self._python_candidate.version
+
+        if len(causes) == 1:
+            specifier = str(causes[0].requirement.specifier)
+            message = (
+                f"Package {causes[0].parent.name!r} requires a different "
+                f"Python: {version} not in {specifier!r}"
+            )
+            return UnsupportedPythonVersion(message)
+
+        message = f"Packages require a different Python. {version} not in:"
+        for cause in causes:
+            package = cause.parent.format_for_error()
+            specifier = str(cause.requirement.specifier)
+            message += f"\n{specifier!r} (required by {package})"
         return UnsupportedPythonVersion(message)
 
-    def get_installation_error(self, e):
-        # type: (ResolutionImpossible) -> InstallationError
+    def _report_single_requirement_conflict(self, req, parent):
+        # type: (Requirement, Optional[Candidate]) -> DistributionNotFound
+        if parent is None:
+            req_disp = str(req)
+        else:
+            req_disp = f"{req} (from {parent.name})"
+
+        cands = self._finder.find_all_candidates(req.project_name)
+        versions = [str(v) for v in sorted({c.version for c in cands})]
+
+        logger.critical(
+            "Could not find a version that satisfies the requirement %s "
+            "(from versions: %s)",
+            req_disp,
+            ", ".join(versions) or "none",
+        )
+
+        return DistributionNotFound(f"No matching distribution found for {req}")
+
+    def get_installation_error(
+        self,
+        e,  # type: ResolutionImpossible[Requirement, Candidate]
+        constraints,  # type: Dict[str, Constraint]
+    ):
+        # type: (...) -> InstallationError
 
         assert e.causes, "Installation error reported with no cause"
 
         # If one of the things we can't solve is "we need Python X.Y",
         # that is what we report.
-        for cause in e.causes:
-            if isinstance(cause.requirement, RequiresPythonRequirement):
-                return self._report_requires_python_error(
-                    cause.requirement,
-                    cause.parent,
-                )
+        requires_python_causes = [
+            cause
+            for cause in e.causes
+            if isinstance(cause.requirement, RequiresPythonRequirement)
+            and not cause.requirement.is_satisfied_by(self._python_candidate)
+        ]
+        if requires_python_causes:
+            # The comprehension above makes sure all Requirement instances are
+            # RequiresPythonRequirement, so let's cast for convenience.
+            return self._report_requires_python_error(
+                cast("Sequence[ConflictCause]", requires_python_causes),
+            )
 
         # Otherwise, we have a set of causes which can't all be satisfied
         # at once.
@@ -415,17 +469,8 @@ class Factory:
         # satisfied. We just report that case.
         if len(e.causes) == 1:
             req, parent = e.causes[0]
-            if parent is None:
-                req_disp = str(req)
-            else:
-                req_disp = f'{req} (from {parent.name})'
-            logger.critical(
-                "Could not find a version that satisfies the requirement %s",
-                req_disp,
-            )
-            return DistributionNotFound(
-                f'No matching distribution found for {req}'
-            )
+            if req.name not in constraints:
+                return self._report_single_requirement_conflict(req, parent)
 
         # OK, we now have a list of requirements that can't all be
         # satisfied at once.
@@ -461,26 +506,35 @@ class Factory:
         else:
             info = "the requested packages"
 
-        msg = "Cannot install {} because these package versions " \
+        msg = (
+            "Cannot install {} because these package versions "
             "have conflicting dependencies.".format(info)
+        )
         logger.critical(msg)
         msg = "\nThe conflict is caused by:"
+
+        relevant_constraints = set()
         for req, parent in e.causes:
+            if req.name in constraints:
+                relevant_constraints.add(req.name)
             msg = msg + "\n    "
             if parent:
-                msg = msg + "{} {} depends on ".format(
-                    parent.name,
-                    parent.version
-                )
+                msg = msg + f"{parent.name} {parent.version} depends on "
             else:
                 msg = msg + "The user requested "
             msg = msg + req.format_for_error()
+        for key in relevant_constraints:
+            spec = constraints[key].specifier
+            msg += f"\n    The user requested (constraint) {key}{spec}"
 
-        msg = msg + "\n\n" + \
-            "To fix this you could try to:\n" + \
-            "1. loosen the range of package versions you've specified\n" + \
-            "2. remove package versions to allow pip attempt to solve " + \
-            "the dependency conflict\n"
+        msg = (
+            msg
+            + "\n\n"
+            + "To fix this you could try to:\n"
+            + "1. loosen the range of package versions you've specified\n"
+            + "2. remove package versions to allow pip to attempt to solve "
+            + "the dependency conflict\n"
+        )
 
         logger.info(msg)
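An illustrative reconstruction, with plain strings rather than pip objects, of the multi-cause message assembled by _report_requires_python_error earlier in this file's diff, for a hypothetical Python 3.6.0 interpreter and two offending packages:

    causes = [
        # (parent.format_for_error(), requirement specifier) stand-ins
        ("pkg-a 1.0", ">=3.7"),
        ("pkg-b 2.1", ">=3.8,<4"),
    ]
    version = "3.6.0"
    message = f"Packages require a different Python. {version} not in:"
    for package, specifier in causes:
        message += f"\n{specifier!r} (required by {package})"
    print(message)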
 
diff --git a/src/pip/_internal/resolution/resolvelib/found_candidates.py b/src/pip/_internal/resolution/resolvelib/found_candidates.py
index 594485061..e8b72e660 100644
--- a/src/pip/_internal/resolution/resolvelib/found_candidates.py
+++ b/src/pip/_internal/resolution/resolvelib/found_candidates.py
@@ -97,6 +97,7 @@ class FoundCandidates(collections_abc.Sequence):
     page when remote packages are actually needed. This improves performance
     when suitable candidates are already installed on disk.
     """
+
     def __init__(
         self,
         get_infos,  # type: Callable[[], Iterator[IndexCandidateInfo]]
diff --git a/src/pip/_internal/resolution/resolvelib/provider.py b/src/pip/_internal/resolution/resolvelib/provider.py
index 1f4439a14..cd2ccfa60 100644
--- a/src/pip/_internal/resolution/resolvelib/provider.py
+++ b/src/pip/_internal/resolution/resolvelib/provider.py
@@ -1,10 +1,20 @@
-from typing import Any, Dict, Iterable, Optional, Sequence, Tuple, Union
+from typing import TYPE_CHECKING, Dict, Iterable, Optional, Sequence, Union
 
 from pip._vendor.resolvelib.providers import AbstractProvider
 
 from .base import Candidate, Constraint, Requirement
 from .factory import Factory
 
+if TYPE_CHECKING:
+    from pip._vendor.resolvelib.providers import Preference
+    from pip._vendor.resolvelib.resolvers import RequirementInformation
+
+    PreferenceInformation = RequirementInformation[Requirement, Candidate]
+
+    _ProviderBase = AbstractProvider[Requirement, Candidate, str]
+else:
+    _ProviderBase = AbstractProvider
+
 # Notes on the relationship between the provider, the factory, and the
 # candidate and requirement classes.
 #
@@ -24,7 +34,7 @@ from .factory import Factory
 # services to those objects (access to pip's finder and preparer).
 
 
-class PipProvider(AbstractProvider):
+class PipProvider(_ProviderBase):
     """Pip's provider implementation for resolvelib.
 
     :params constraints: A mapping of constraints specified by the user. Keys
@@ -50,17 +60,17 @@ class PipProvider(AbstractProvider):
         self._upgrade_strategy = upgrade_strategy
         self._user_requested = user_requested
 
-    def identify(self, dependency):
+    def identify(self, requirement_or_candidate):
         # type: (Union[Requirement, Candidate]) -> str
-        return dependency.name
+        return requirement_or_candidate.name
 
     def get_preference(
         self,
         resolution,  # type: Optional[Candidate]
-        candidates,  # type: Sequence[Candidate]
-        information  # type: Sequence[Tuple[Requirement, Candidate]]
+        candidates,  # type: Iterable[Candidate]
+        information,  # type: Iterable[PreferenceInformation]
     ):
-        # type: (...) -> Any
+        # type: (...) -> Preference
         """Produce a sort key for given requirement based on preference.
 
         The lower the return value is, the more preferred this group of
@@ -99,9 +109,7 @@ class PipProvider(AbstractProvider):
                 return 0
             spec_sets = (ireq.specifier for ireq in ireqs if ireq)
             operators = [
-                specifier.operator
-                for spec_set in spec_sets
-                for specifier in spec_set
+                specifier.operator for spec_set in spec_sets for specifier in spec_set
             ]
             if any(op in ("==", "===") for op in operators):
                 return 1
@@ -122,7 +130,7 @@ class PipProvider(AbstractProvider):
         # delaying Setuptools helps reduce branches the resolver has to check.
         # This serves as a temporary fix for issues like "apache-airflow[all]"
         # while we work on "proper" branch pruning techniques.
-        delay_this = (key == "setuptools")
+        delay_this = key == "setuptools"
 
         return (delay_this, restrictive, order, key)
 
@@ -147,7 +155,7 @@ class PipProvider(AbstractProvider):
             if self._upgrade_strategy == "eager":
                 return True
             elif self._upgrade_strategy == "only-if-needed":
-                return (name in self._user_requested)
+                return name in self._user_requested
             return False
 
         return self._factory.find_candidates(
@@ -163,8 +171,4 @@ class PipProvider(AbstractProvider):
     def get_dependencies(self, candidate):
         # type: (Candidate) -> Sequence[Requirement]
         with_requires = not self._ignore_dependencies
-        return [
-            r
-            for r in candidate.iter_dependencies(with_requires)
-            if r is not None
-        ]
+        return [r for r in candidate.iter_dependencies(with_requires) if r is not None]
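A small sketch of how the preference tuples returned by get_preference order identifiers: Python compares them element-wise, so non-delayed names sort before the deliberately delayed "setuptools", lower ratings before higher ones, and earlier user-request positions before later ones. The concrete values below are illustrative only:

    preferences = {
        "requests": (False, 1, 0.0, "requests"),
        "idna": (False, 2, float("inf"), "idna"),
        "setuptools": (True, 2, float("inf"), "setuptools"),
    }
    for name in sorted(preferences, key=preferences.__getitem__):
        print(name)  # requests, then idna, then setuptools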
diff --git a/src/pip/_internal/resolution/resolvelib/reporter.py b/src/pip/_internal/resolution/resolvelib/reporter.py
index 697351bd7..074583de0 100644
--- a/src/pip/_internal/resolution/resolvelib/reporter.py
+++ b/src/pip/_internal/resolution/resolvelib/reporter.py
@@ -10,7 +10,6 @@ logger = getLogger(__name__)
 
 
 class PipReporter(BaseReporter):
-
     def __init__(self):
         # type: () -> None
         self.backtracks_by_package = defaultdict(int)  # type: DefaultDict[str, int]
@@ -32,7 +31,7 @@ class PipReporter(BaseReporter):
                 "runtime. If you want to abort this run, you can press "
                 "Ctrl + C to do so. To improve how pip performs, tell us what "
                 "happened here: https://pip.pypa.io/surveys/backtracking"
-            )
+            ),
         }
 
     def backtracking(self, candidate):
diff --git a/src/pip/_internal/resolution/resolvelib/requirements.py b/src/pip/_internal/resolution/resolvelib/requirements.py
index aec45aa68..a7fcdd1e3 100644
--- a/src/pip/_internal/resolution/resolvelib/requirements.py
+++ b/src/pip/_internal/resolution/resolvelib/requirements.py
@@ -1,5 +1,5 @@
 from pip._vendor.packaging.specifiers import SpecifierSet
-from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
 
 from pip._internal.req.req_install import InstallRequirement
 
@@ -24,7 +24,7 @@ class ExplicitRequirement(Requirement):
 
     @property
     def project_name(self):
-        # type: () -> str
+        # type: () -> NormalizedName
         # No need to canonicalise - the candidate did this
         return self.candidate.project_name
 
@@ -67,7 +67,8 @@ class SpecifierRequirement(Requirement):
 
     @property
     def project_name(self):
-        # type: () -> str
+        # type: () -> NormalizedName
+        assert self._ireq.req, "Specifier-backed ireq is always PEP 508"
         return canonicalize_name(self._ireq.req.name)
 
     @property
@@ -96,19 +97,21 @@ class SpecifierRequirement(Requirement):
 
     def is_satisfied_by(self, candidate):
         # type: (Candidate) -> bool
-        assert candidate.name == self.name, \
-            "Internal issue: Candidate is not for this requirement " \
-            " {} vs {}".format(candidate.name, self.name)
+        assert candidate.name == self.name, (
+            f"Internal issue: Candidate is not for this requirement "
+            f"{candidate.name} vs {self.name}"
+        )
         # We can safely always allow prereleases here since PackageFinder
         # already implements the prerelease logic, and would have filtered out
         # prerelease candidates if the user does not expect them.
+        assert self._ireq.req, "Specifier-backed ireq is always PEP 508"
         spec = self._ireq.req.specifier
         return spec.contains(candidate.version, prereleases=True)
 
 
 class RequiresPythonRequirement(Requirement):
-    """A requirement representing Requires-Python metadata.
-    """
+    """A requirement representing Requires-Python metadata."""
+
     def __init__(self, specifier, match):
         # type: (SpecifierSet, Candidate) -> None
         self.specifier = specifier
@@ -127,7 +130,7 @@ class RequiresPythonRequirement(Requirement):
 
     @property
     def project_name(self):
-        # type: () -> str
+        # type: () -> NormalizedName
         return self._candidate.project_name
 
     @property
@@ -155,15 +158,15 @@ class RequiresPythonRequirement(Requirement):
 
 
 class UnsatisfiableRequirement(Requirement):
-    """A requirement that cannot be satisfied.
-    """
+    """A requirement that cannot be satisfied."""
+
     def __init__(self, name):
-        # type: (str) -> None
+        # type: (NormalizedName) -> None
         self._name = name
 
     def __str__(self):
         # type: () -> str
-        return "{} (unavailable)".format(self._name)
+        return f"{self._name} (unavailable)"
 
     def __repr__(self):
         # type: () -> str
@@ -174,7 +177,7 @@ class UnsatisfiableRequirement(Requirement):
 
     @property
     def project_name(self):
-        # type: () -> str
+        # type: () -> NormalizedName
         return self._name
 
     @property
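
For readers following the NormalizedName changes above, a minimal sketch (not part of the patch) of what canonicalize_name() guarantees, which is why project_name can now be typed more precisely; pip vendors this helper from packaging:

    from packaging.utils import canonicalize_name  # vendored by pip as pip._vendor.packaging.utils

    # Case and the separators "-", "_", "." are collapsed, so differently
    # spelled project names compare equal once canonicalised.
    assert canonicalize_name("Django_REST.framework") == "django-rest-framework"
    assert canonicalize_name("Pip") == canonicalize_name("pip")
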
diff --git a/src/pip/_internal/resolution/resolvelib/resolver.py b/src/pip/_internal/resolution/resolvelib/resolver.py
index 5bfe3712b..0eab785d8 100644
--- a/src/pip/_internal/resolution/resolvelib/resolver.py
+++ b/src/pip/_internal/resolution/resolvelib/resolver.py
@@ -1,13 +1,13 @@
 import functools
 import logging
 import os
-from typing import TYPE_CHECKING, Dict, List, Optional, Set, Tuple
+from typing import TYPE_CHECKING, Dict, List, Optional, Set, Tuple, cast
 
-from pip._vendor import six
 from pip._vendor.packaging.utils import canonicalize_name
-from pip._vendor.resolvelib import ResolutionImpossible
+from pip._vendor.packaging.version import parse as parse_version
+from pip._vendor.resolvelib import BaseReporter, ResolutionImpossible
 from pip._vendor.resolvelib import Resolver as RLResolver
-from pip._vendor.resolvelib.resolvers import Result
+from pip._vendor.resolvelib.structs import DirectedGraph
 
 from pip._internal.cache import WheelCache
 from pip._internal.exceptions import InstallationError
@@ -28,11 +28,14 @@ from pip._internal.utils.deprecation import deprecated
 from pip._internal.utils.filetypes import is_archive_file
 from pip._internal.utils.misc import dist_is_editable
 
-from .base import Constraint
+from .base import Candidate, Constraint, Requirement
 from .factory import Factory
 
 if TYPE_CHECKING:
-    from pip._vendor.resolvelib.structs import Graph
+    from pip._vendor.resolvelib.resolvers import Result as RLResult
+
+    Result = RLResult[Requirement, Candidate, str]
+
 
 logger = logging.getLogger(__name__)
 
@@ -86,6 +89,7 @@ class Resolver(BaseResolver):
                     raise InstallationError(problem)
                 if not req.match_markers():
                     continue
+                assert req.name, "Constraint must be named"
                 name = canonicalize_name(req.name)
                 if name in constraints:
                     constraints[name] &= req
@@ -97,7 +101,7 @@ class Resolver(BaseResolver):
                     if canonical_name not in user_requested:
                         user_requested[canonical_name] = i
                 r = self.factory.make_requirement_from_install_req(
-                    req, requested_extras=(),
+                    req, requested_extras=()
                 )
                 if r is not None:
                     requirements.append(r)
@@ -110,23 +114,29 @@ class Resolver(BaseResolver):
             user_requested=user_requested,
         )
         if "PIP_RESOLVER_DEBUG" in os.environ:
-            reporter = PipDebuggingReporter()
+            reporter = PipDebuggingReporter()  # type: BaseReporter
         else:
             reporter = PipReporter()
-        resolver = RLResolver(provider, reporter)
+        resolver = RLResolver(
+            provider,
+            reporter,
+        )  # type: RLResolver[Requirement, Candidate, str]
 
         try:
             try_to_avoid_resolution_too_deep = 2000000
-            self._result = resolver.resolve(
-                requirements, max_rounds=try_to_avoid_resolution_too_deep,
+            result = self._result = resolver.resolve(
+                requirements, max_rounds=try_to_avoid_resolution_too_deep
             )
 
         except ResolutionImpossible as e:
-            error = self.factory.get_installation_error(e)
-            six.raise_from(error, e)
+            error = self.factory.get_installation_error(
+                cast("ResolutionImpossible[Requirement, Candidate]", e),
+                constraints,
+            )
+            raise error from e
 
         req_set = RequirementSet(check_supported_wheels=check_supported_wheels)
-        for candidate in self._result.mapping.values():
+        for candidate in result.mapping.values():
             ireq = candidate.get_install_requirement()
             if ireq is None:
                 continue
@@ -140,14 +150,14 @@ class Resolver(BaseResolver):
             elif self.factory.force_reinstall:
                 # The --force-reinstall flag is set -- reinstall.
                 ireq.should_reinstall = True
-            elif installed_dist.parsed_version != candidate.version:
+            elif parse_version(installed_dist.version) != candidate.version:
                 # The installation is different in version -- reinstall.
                 ireq.should_reinstall = True
             elif candidate.is_editable or dist_is_editable(installed_dist):
                 # The incoming distribution is editable, or different in
                 # editable-ness to installation -- reinstall.
                 ireq.should_reinstall = True
-            elif candidate.source_link.is_file:
+            elif candidate.source_link and candidate.source_link.is_file:
                 # The incoming distribution is under file://
                 if candidate.source_link.is_wheel:
                     # is a local wheel -- do nothing.
@@ -188,14 +198,14 @@ class Resolver(BaseResolver):
                 # The reason can contain non-ASCII characters, Unicode
                 # is required for Python 2.
                 msg = (
-                    'The candidate selected for download or install is a '
-                    'yanked version: {name!r} candidate (version {version} '
-                    'at {link})\nReason for being yanked: {reason}'
+                    "The candidate selected for download or install is a "
+                    "yanked version: {name!r} candidate (version {version} "
+                    "at {link})\nReason for being yanked: {reason}"
                 ).format(
                     name=candidate.name,
                     version=candidate.version,
                     link=link,
-                    reason=link.yanked_reason or '',
+                    reason=link.yanked_reason or "",
                 )
                 logger.warning(msg)
 
@@ -235,7 +245,7 @@ class Resolver(BaseResolver):
 
 
 def get_topological_weights(graph, expected_node_count):
-    # type: (Graph, int) -> Dict[Optional[str], int]
+    # type: (DirectedGraph[Optional[str]], int) -> Dict[Optional[str], int]
     """Assign weights to each node based on how "deep" they are.
 
     This implementation may change at any point in the future without prior
@@ -281,7 +291,7 @@ def get_topological_weights(graph, expected_node_count):
 
 
 def _req_set_item_sorter(
-    item,     # type: Tuple[str, InstallRequirement]
+    item,  # type: Tuple[str, InstallRequirement]
     weights,  # type: Dict[Optional[str], int]
 ):
     # type: (...) -> Tuple[int, str]
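
A small, self-contained illustration (not part of the patch) of why the reinstall check above now compares parsed versions rather than raw strings:

    from packaging.version import parse as parse_version  # vendored by pip as pip._vendor.packaging.version

    # Textually different but semantically equal versions compare equal once
    # parsed, so an installed "1.0" does not trigger a reinstall of "1.0.0".
    assert "1.0" != "1.0.0"
    assert parse_version("1.0") == parse_version("1.0.0")
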
diff --git a/src/pip/_internal/utils/appdirs.py b/src/pip/_internal/utils/appdirs.py
index b8c101b0c..db974dad6 100644
--- a/src/pip/_internal/utils/appdirs.py
+++ b/src/pip/_internal/utils/appdirs.py
@@ -21,7 +21,7 @@ def user_config_dir(appname, roaming=True):
     # type: (str, bool) -> str
     path = _appdirs.user_config_dir(appname, appauthor=False, roaming=roaming)
     if _appdirs.system == "darwin" and not os.path.isdir(path):
-        path = os.path.expanduser('~/.config/')
+        path = os.path.expanduser("~/.config/")
         if appname:
             path = os.path.join(path, appname)
     return path
@@ -34,5 +34,5 @@ def site_config_dirs(appname):
     dirval = _appdirs.site_config_dir(appname, appauthor=False, multipath=True)
     if _appdirs.system not in ["win32", "darwin"]:
         # always look in /etc directly as well
-        return dirval.split(os.pathsep) + ['/etc']
+        return dirval.split(os.pathsep) + ["/etc"]
     return [dirval]
diff --git a/src/pip/_internal/utils/compat.py b/src/pip/_internal/utils/compat.py
index a468a1825..1fb2dc729 100644
--- a/src/pip/_internal/utils/compat.py
+++ b/src/pip/_internal/utils/compat.py
@@ -1,17 +1,11 @@
 """Stuff that differs in different Python versions and platform
 distributions."""
 
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-import codecs
-import locale
 import logging
 import os
 import sys
-from typing import Optional, Union
 
-__all__ = ["console_to_str", "get_path_uid", "stdlib_pkgs", "WINDOWS"]
+__all__ = ["get_path_uid", "stdlib_pkgs", "WINDOWS"]
 
 
 logger = logging.getLogger(__name__)
@@ -21,88 +15,16 @@ def has_tls():
     # type: () -> bool
     try:
         import _ssl  # noqa: F401  # ignore unused
+
         return True
     except ImportError:
         pass
 
     from pip._vendor.urllib3.util import IS_PYOPENSSL
+
     return IS_PYOPENSSL
 
 
-def str_to_display(data, desc=None):
-    # type: (Union[bytes, str], Optional[str]) -> str
-    """
-    For display or logging purposes, convert a bytes object (or text) to
-    text (e.g. unicode in Python 2) safe for output.
-
-    :param desc: An optional phrase describing the input data, for use in
-        the log message if a warning is logged. Defaults to "Bytes object".
-
-    This function should never error out and so can take a best effort
-    approach. It is okay to be lossy if needed since the return value is
-    just for display.
-
-    We assume the data is in the locale preferred encoding. If it won't
-    decode properly, we warn the user but decode as best we can.
-
-    We also ensure that the output can be safely written to standard output
-    without encoding errors.
-    """
-    if isinstance(data, str):
-        return data
-
-    # Otherwise, data is a bytes object (str in Python 2).
-    # First, get the encoding we assume. This is the preferred
-    # encoding for the locale, unless that is not found, or
-    # it is ASCII, in which case assume UTF-8
-    encoding = locale.getpreferredencoding()
-    if (not encoding) or codecs.lookup(encoding).name == "ascii":
-        encoding = "utf-8"
-
-    # Now try to decode the data - if we fail, warn the user and
-    # decode with replacement.
-    try:
-        decoded_data = data.decode(encoding)
-    except UnicodeDecodeError:
-        logger.warning(
-            '%s does not appear to be encoded as %s',
-            desc or 'Bytes object',
-            encoding,
-        )
-        decoded_data = data.decode(encoding, errors="backslashreplace")
-
-    # Make sure we can print the output, by encoding it to the output
-    # encoding with replacement of unencodable characters, and then
-    # decoding again.
-    # We use stderr's encoding because it's less likely to be
-    # redirected and if we don't find an encoding we skip this
-    # step (on the assumption that output is wrapped by something
-    # that won't fail).
-    # The double getattr is to deal with the possibility that we're
-    # being called in a situation where sys.__stderr__ doesn't exist,
-    # or doesn't have an encoding attribute. Neither of these cases
-    # should occur in normal pip use, but there's no harm in checking
-    # in case people use pip in (unsupported) unusual situations.
-    output_encoding = getattr(getattr(sys, "__stderr__", None),
-                              "encoding", None)
-
-    if output_encoding:
-        output_encoded = decoded_data.encode(
-            output_encoding,
-            errors="backslashreplace"
-        )
-        decoded_data = output_encoded.decode(output_encoding)
-
-    return decoded_data
-
-
-def console_to_str(data):
-    # type: (bytes) -> str
-    """Return a string, safe for output, of subprocess output.
-    """
-    return str_to_display(data, desc='Subprocess output')
-
-
 def get_path_uid(path):
     # type: (str) -> int
     """
@@ -116,7 +38,7 @@ def get_path_uid(path):
 
     :raises OSError: When path is a symlink or can't be read.
     """
-    if hasattr(os, 'O_NOFOLLOW'):
+    if hasattr(os, "O_NOFOLLOW"):
         fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW)
         file_uid = os.fstat(fd).st_uid
         os.close(fd)
@@ -127,10 +49,7 @@ def get_path_uid(path):
             file_uid = os.stat(path).st_uid
         else:
             # raise OSError for parity with os.O_NOFOLLOW above
-            raise OSError(
-                "{} is a symlink; Will not return uid for symlinks".format(
-                    path)
-            )
+            raise OSError(f"{path} is a symlink; Will not return uid for symlinks")
     return file_uid
 
 
@@ -143,5 +62,4 @@ stdlib_pkgs = {"python", "wsgiref", "argparse"}
 
 
 # windows detection, covers cpython and ironpython
-WINDOWS = (sys.platform.startswith("win") or
-           (sys.platform == 'cli' and os.name == 'nt'))
+WINDOWS = sys.platform.startswith("win") or (sys.platform == "cli" and os.name == "nt")
diff --git a/src/pip/_internal/utils/compatibility_tags.py b/src/pip/_internal/utils/compatibility_tags.py
index 5578f1f78..14fe51c1a 100644
--- a/src/pip/_internal/utils/compatibility_tags.py
+++ b/src/pip/_internal/utils/compatibility_tags.py
@@ -18,13 +18,13 @@ if TYPE_CHECKING:
     from pip._vendor.packaging.tags import PythonVersion
 
 
-_osx_arch_pat = re.compile(r'(.+)_(\d+)_(\d+)_(.+)')
+_osx_arch_pat = re.compile(r"(.+)_(\d+)_(\d+)_(.+)")
 
 
 def version_info_to_nodot(version_info):
     # type: (Tuple[int, ...]) -> str
     # Only use up to the first two numbers.
-    return ''.join(map(str, version_info[:2]))
+    return "".join(map(str, version_info[:2]))
 
 
 def _mac_platforms(arch):
@@ -39,7 +39,7 @@ def _mac_platforms(arch):
             # actual prefix provided by the user in case they provided
             # something like "macosxcustom_". It may be good to remove
             # this as undocumented or deprecate it in the future.
-            '{}_{}'.format(name, arch[len('macosx_'):])
+            "{}_{}".format(name, arch[len("macosx_") :])
             for arch in mac_platforms(mac_version, actual_arch)
         ]
     else:
@@ -51,31 +51,31 @@ def _mac_platforms(arch):
 def _custom_manylinux_platforms(arch):
     # type: (str) -> List[str]
     arches = [arch]
-    arch_prefix, arch_sep, arch_suffix = arch.partition('_')
-    if arch_prefix == 'manylinux2014':
+    arch_prefix, arch_sep, arch_suffix = arch.partition("_")
+    if arch_prefix == "manylinux2014":
         # manylinux1/manylinux2010 wheels run on most manylinux2014 systems
         # with the exception of wheels depending on ncurses. PEP 599 states
         # manylinux1/manylinux2010 wheels should be considered
         # manylinux2014 wheels:
         # https://www.python.org/dev/peps/pep-0599/#backwards-compatibility-with-manylinux2010-wheels
-        if arch_suffix in {'i686', 'x86_64'}:
-            arches.append('manylinux2010' + arch_sep + arch_suffix)
-            arches.append('manylinux1' + arch_sep + arch_suffix)
-    elif arch_prefix == 'manylinux2010':
+        if arch_suffix in {"i686", "x86_64"}:
+            arches.append("manylinux2010" + arch_sep + arch_suffix)
+            arches.append("manylinux1" + arch_sep + arch_suffix)
+    elif arch_prefix == "manylinux2010":
         # manylinux1 wheels run on most manylinux2010 systems with the
         # exception of wheels depending on ncurses. PEP 571 states
         # manylinux1 wheels should be considered manylinux2010 wheels:
         # https://www.python.org/dev/peps/pep-0571/#backwards-compatibility-with-manylinux1-wheels
-        arches.append('manylinux1' + arch_sep + arch_suffix)
+        arches.append("manylinux1" + arch_sep + arch_suffix)
     return arches
 
 
 def _get_custom_platforms(arch):
     # type: (str) -> List[str]
-    arch_prefix, arch_sep, arch_suffix = arch.partition('_')
-    if arch.startswith('macosx'):
+    arch_prefix, arch_sep, arch_suffix = arch.partition("_")
+    if arch.startswith("macosx"):
         arches = _mac_platforms(arch)
-    elif arch_prefix in ['manylinux2014', 'manylinux2010']:
+    elif arch_prefix in ["manylinux2014", "manylinux2010"]:
         arches = _custom_manylinux_platforms(arch)
     else:
         arches = [arch]
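
To make the PEP 599 / PEP 571 fallback behaviour above concrete, a short usage sketch (not part of the patch) of the private helper:

    from pip._internal.utils.compatibility_tags import _get_custom_platforms

    # A manylinux2014 request for x86_64 also accepts the older manylinux
    # tags, as encoded by _custom_manylinux_platforms() above.
    assert _get_custom_platforms("manylinux2014_x86_64") == [
        "manylinux2014_x86_64",
        "manylinux2010_x86_64",
        "manylinux1_x86_64",
    ]
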
@@ -121,7 +121,7 @@ def get_supported(
     version=None,  # type: Optional[str]
     platforms=None,  # type: Optional[List[str]]
     impl=None,  # type: Optional[str]
-    abis=None  # type: Optional[List[str]]
+    abis=None,  # type: Optional[List[str]]
 ):
     # type: (...) -> List[Tag]
     """Return a list of supported tags for each version specified in
diff --git a/src/pip/_internal/utils/deprecation.py b/src/pip/_internal/utils/deprecation.py
index 803830892..b62b3fb65 100644
--- a/src/pip/_internal/utils/deprecation.py
+++ b/src/pip/_internal/utils/deprecation.py
@@ -2,12 +2,9 @@
 A module that implements tooling to enable easy warnings about deprecations.
 """
 
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
 import logging
 import warnings
-from typing import Any, Optional
+from typing import Any, Optional, TextIO, Type, Union
 
 from pip._vendor.packaging.version import parse
 
@@ -24,21 +21,25 @@ _original_showwarning = None  # type: Any
 
 
 # Warnings <-> Logging Integration
-def _showwarning(message, category, filename, lineno, file=None, line=None):
+def _showwarning(
+    message,  # type: Union[Warning, str]
+    category,  # type: Type[Warning]
+    filename,  # type: str
+    lineno,  # type: int
+    file=None,  # type: Optional[TextIO]
+    line=None,  # type: Optional[str]
+):
+    # type: (...) -> None
     if file is not None:
         if _original_showwarning is not None:
-            _original_showwarning(
-                message, category, filename, lineno, file, line,
-            )
+            _original_showwarning(message, category, filename, lineno, file, line)
     elif issubclass(category, PipDeprecationWarning):
         # We use a specially named logger which will handle all of the
         # deprecation messages for pip.
         logger = logging.getLogger("pip._internal.deprecations")
         logger.warning(message)
     else:
-        _original_showwarning(
-            message, category, filename, lineno, file, line,
-        )
+        _original_showwarning(message, category, filename, lineno, file, line)
 
 
 def install_warning_logger():
@@ -82,10 +83,13 @@ def deprecated(reason, replacement, gone_in, issue=None):
         (reason, DEPRECATION_MSG_PREFIX + "{}"),
         (gone_in, "pip {} will remove support for this functionality."),
         (replacement, "A possible replacement is {}."),
-        (issue, (
-            "You can find discussion regarding this at "
-            "https://github.com/pypa/pip/issues/{}."
-        )),
+        (
+            issue,
+            (
+                "You can find discussion regarding this at "
+                "https://github.com/pypa/pip/issues/{}."
+            ),
+        ),
     ]
     message = " ".join(
         template.format(val) for val, template in sentences if val is not None
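
For context (not part of the patch), this is roughly how the (value, template) pairs above are joined into the final warning text; the concrete values below are hypothetical and the "DEPRECATION: " prefix is an assumption about DEPRECATION_MSG_PREFIX:

    # Hypothetical inputs; pip fills these from deprecated()'s arguments.
    sentences = [
        ("X is deprecated", "DEPRECATION: {}"),  # reason, prefixed by DEPRECATION_MSG_PREFIX
        ("21.0", "pip {} will remove support for this functionality."),  # gone_in
        ("Y", "A possible replacement is {}."),  # replacement
        (12345, "You can find discussion regarding this at "
                "https://github.com/pypa/pip/issues/{}."),  # issue
    ]
    message = " ".join(
        template.format(val) for val, template in sentences if val is not None
    )
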
diff --git a/src/pip/_internal/utils/direct_url_helpers.py b/src/pip/_internal/utils/direct_url_helpers.py
index e5ddc6a5c..eb50ac42b 100644
--- a/src/pip/_internal/utils/direct_url_helpers.py
+++ b/src/pip/_internal/utils/direct_url_helpers.py
@@ -47,8 +47,8 @@ def direct_url_from_link(link, source_dir=None, link_is_in_wheel_cache=False):
     if link.is_vcs:
         vcs_backend = vcs.get_backend_for_scheme(link.scheme)
         assert vcs_backend
-        url, requested_revision, _ = (
-            vcs_backend.get_url_rev_and_auth(link.url_without_fragment)
+        url, requested_revision, _ = vcs_backend.get_url_rev_and_auth(
+            link.url_without_fragment
         )
         # For VCS links, we need to find out and add commit_id.
         if link_is_in_wheel_cache:
@@ -106,7 +106,7 @@ def dist_get_direct_url(dist):
     except (
         DirectUrlValidationError,
         json.JSONDecodeError,
-        UnicodeDecodeError
+        UnicodeDecodeError,
     ) as e:
         logger.warning(
             "Error parsing %s for %s: %s",
diff --git a/src/pip/_internal/utils/encoding.py b/src/pip/_internal/utils/encoding.py
index 67b0209f6..7c8893d55 100644
--- a/src/pip/_internal/utils/encoding.py
+++ b/src/pip/_internal/utils/encoding.py
@@ -5,16 +5,16 @@ import sys
 from typing import List, Tuple
 
 BOMS = [
-    (codecs.BOM_UTF8, 'utf-8'),
-    (codecs.BOM_UTF16, 'utf-16'),
-    (codecs.BOM_UTF16_BE, 'utf-16-be'),
-    (codecs.BOM_UTF16_LE, 'utf-16-le'),
-    (codecs.BOM_UTF32, 'utf-32'),
-    (codecs.BOM_UTF32_BE, 'utf-32-be'),
-    (codecs.BOM_UTF32_LE, 'utf-32-le'),
+    (codecs.BOM_UTF8, "utf-8"),
+    (codecs.BOM_UTF16, "utf-16"),
+    (codecs.BOM_UTF16_BE, "utf-16-be"),
+    (codecs.BOM_UTF16_LE, "utf-16-le"),
+    (codecs.BOM_UTF32, "utf-32"),
+    (codecs.BOM_UTF32_BE, "utf-32-be"),
+    (codecs.BOM_UTF32_LE, "utf-32-le"),
 ]  # type: List[Tuple[bytes, str]]
 
-ENCODING_RE = re.compile(br'coding[:=]\s*([-\w.]+)')
+ENCODING_RE = re.compile(br"coding[:=]\s*([-\w.]+)")
 
 
 def auto_decode(data):
@@ -24,13 +24,13 @@ def auto_decode(data):
     Fallback to locale.getpreferredencoding(False) like open() on Python3"""
     for bom, encoding in BOMS:
         if data.startswith(bom):
-            return data[len(bom):].decode(encoding)
+            return data[len(bom) :].decode(encoding)
     # Lets check the first two lines as in PEP263
-    for line in data.split(b'\n')[:2]:
-        if line[0:1] == b'#' and ENCODING_RE.search(line):
+    for line in data.split(b"\n")[:2]:
+        if line[0:1] == b"#" and ENCODING_RE.search(line):
             result = ENCODING_RE.search(line)
             assert result is not None
-            encoding = result.groups()[0].decode('ascii')
+            encoding = result.groups()[0].decode("ascii")
             return data.decode(encoding)
     return data.decode(
         locale.getpreferredencoding(False) or sys.getdefaultencoding(),
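
A brief usage sketch (not part of the patch) of auto_decode()'s decode order as shown above: a recognised BOM wins, then a PEP 263 coding comment, then the locale default:

    import codecs

    from pip._internal.utils.encoding import auto_decode

    # A UTF-8 BOM selects the utf-8 codec and is stripped from the result.
    assert auto_decode(codecs.BOM_UTF8 + "café".encode("utf-8")) == "café"
    # Without a BOM, a PEP 263 coding comment in the first two lines is honoured.
    assert auto_decode(b"# coding: utf-8\nname = 1\n") == "# coding: utf-8\nname = 1\n"
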
diff --git a/src/pip/_internal/utils/filesystem.py b/src/pip/_internal/utils/filesystem.py
index 14553d377..3db97dc41 100644
--- a/src/pip/_internal/utils/filesystem.py
+++ b/src/pip/_internal/utils/filesystem.py
@@ -62,8 +62,7 @@ def copy2_fixed(src, dest):
                 pass
             else:
                 if is_socket_file:
-                    raise shutil.SpecialFileError(
-                        "`{f}` is a socket".format(**locals()))
+                    raise shutil.SpecialFileError(f"`{f}` is a socket")
 
         raise
 
@@ -88,8 +87,8 @@ def adjacent_tmp_file(path, **kwargs):
         delete=False,
         dir=os.path.dirname(path),
         prefix=os.path.basename(path),
-        suffix='.tmp',
-        **kwargs
+        suffix=".tmp",
+        **kwargs,
     ) as f:
         result = cast(BinaryIO, f)
         try:
@@ -120,7 +119,7 @@ def test_writable_dir(path):
             break  # Should never get here, but infinite loops are bad
         path = parent
 
-    if os.name == 'posix':
+    if os.name == "posix":
         return os.access(path, os.W_OK)
 
     return _test_writable_dir_win(path)
@@ -130,10 +129,10 @@ def _test_writable_dir_win(path):
     # type: (str) -> bool
     # os.access doesn't work on Windows: http://bugs.python.org/issue2528
     # and we can't use tempfile: http://bugs.python.org/issue22107
-    basename = 'accesstest_deleteme_fishfingers_custard_'
-    alphabet = 'abcdefghijklmnopqrstuvwxyz0123456789'
+    basename = "accesstest_deleteme_fishfingers_custard_"
+    alphabet = "abcdefghijklmnopqrstuvwxyz0123456789"
     for _ in range(10):
-        name = basename + ''.join(random.choice(alphabet) for _ in range(6))
+        name = basename + "".join(random.choice(alphabet) for _ in range(6))
         file = os.path.join(path, name)
         try:
             fd = os.open(file, os.O_RDWR | os.O_CREAT | os.O_EXCL)
@@ -152,9 +151,7 @@ def _test_writable_dir_win(path):
             return True
 
     # This should never be reached
-    raise OSError(
-        'Unexpected condition testing for writable directory'
-    )
+    raise OSError("Unexpected condition testing for writable directory")
 
 
 def find_files(path, pattern):
diff --git a/src/pip/_internal/utils/filetypes.py b/src/pip/_internal/utils/filetypes.py
index 117f38757..da935846f 100644
--- a/src/pip/_internal/utils/filetypes.py
+++ b/src/pip/_internal/utils/filetypes.py
@@ -5,15 +5,18 @@ from typing import Tuple
 
 from pip._internal.utils.misc import splitext
 
-WHEEL_EXTENSION = '.whl'
-BZ2_EXTENSIONS = ('.tar.bz2', '.tbz')  # type: Tuple[str, ...]
-XZ_EXTENSIONS = ('.tar.xz', '.txz', '.tlz',
-                 '.tar.lz', '.tar.lzma')  # type: Tuple[str, ...]
-ZIP_EXTENSIONS = ('.zip', WHEEL_EXTENSION)  # type: Tuple[str, ...]
-TAR_EXTENSIONS = ('.tar.gz', '.tgz', '.tar')  # type: Tuple[str, ...]
-ARCHIVE_EXTENSIONS = (
-    ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS
-)
+WHEEL_EXTENSION = ".whl"
+BZ2_EXTENSIONS = (".tar.bz2", ".tbz")  # type: Tuple[str, ...]
+XZ_EXTENSIONS = (
+    ".tar.xz",
+    ".txz",
+    ".tlz",
+    ".tar.lz",
+    ".tar.lzma",
+)  # type: Tuple[str, ...]
+ZIP_EXTENSIONS = (".zip", WHEEL_EXTENSION)  # type: Tuple[str, ...]
+TAR_EXTENSIONS = (".tar.gz", ".tgz", ".tar")  # type: Tuple[str, ...]
+ARCHIVE_EXTENSIONS = ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS
 
 
 def is_archive_file(name):
diff --git a/src/pip/_internal/utils/hashes.py b/src/pip/_internal/utils/hashes.py
index f91e42973..e0ecf6ee9 100644
--- a/src/pip/_internal/utils/hashes.py
+++ b/src/pip/_internal/utils/hashes.py
@@ -10,12 +10,12 @@ if TYPE_CHECKING:
 
 # The recommended hash algo of the moment. Change this whenever the state of
 # the art changes; it won't hurt backward compatibility.
-FAVORITE_HASH = 'sha256'
+FAVORITE_HASH = "sha256"
 
 
 # Names of hashlib algorithms allowed by the --hash option and ``pip hash``
 # Currently, those are the ones at least as collision-resistant as sha256.
-STRONG_HASHES = ['sha256', 'sha384', 'sha512']
+STRONG_HASHES = ["sha256", "sha384", "sha512"]
 
 
 class Hashes:
@@ -23,6 +23,7 @@ class Hashes:
     known-good values
 
     """
+
     def __init__(self, hashes=None):
         # type: (Dict[str, List[str]]) -> None
         """
@@ -63,7 +64,7 @@ class Hashes:
 
     def is_hash_allowed(
         self,
-        hash_name,   # type: str
+        hash_name,  # type: str
         hex_digest,  # type: str
     ):
         # type: (...) -> bool
@@ -83,9 +84,7 @@ class Hashes:
             try:
                 gots[hash_name] = hashlib.new(hash_name)
             except (ValueError, TypeError):
-                raise InstallationError(
-                    f'Unknown hash name: {hash_name}'
-                )
+                raise InstallationError(f"Unknown hash name: {hash_name}")
 
         for chunk in chunks:
             for hash in gots.values():
@@ -111,7 +110,7 @@ class Hashes:
 
     def check_against_path(self, path):
         # type: (str) -> None
-        with open(path, 'rb') as file:
+        with open(path, "rb") as file:
             return self.check_against_file(file)
 
     def __nonzero__(self):
@@ -132,11 +131,13 @@ class Hashes:
     def __hash__(self):
         # type: () -> int
         return hash(
-            ",".join(sorted(
-                ":".join((alg, digest))
-                for alg, digest_list in self._allowed.items()
-                for digest in digest_list
-            ))
+            ",".join(
+                sorted(
+                    ":".join((alg, digest))
+                    for alg, digest_list in self._allowed.items()
+                    for digest in digest_list
+                )
+            )
         )
 
 
@@ -147,6 +148,7 @@ class MissingHashes(Hashes):
     exception showing it to the user.
 
     """
+
     def __init__(self):
         # type: () -> None
         """Don't offer the ``hashes`` kwarg."""
diff --git a/src/pip/_internal/utils/inject_securetransport.py b/src/pip/_internal/utils/inject_securetransport.py
index 5b93b1d67..b6863d934 100644
--- a/src/pip/_internal/utils/inject_securetransport.py
+++ b/src/pip/_internal/utils/inject_securetransport.py
@@ -22,7 +22,7 @@ def inject_securetransport():
         return
 
     # Checks for OpenSSL 1.0.1
-    if ssl.OPENSSL_VERSION_NUMBER >= 0x1000100f:
+    if ssl.OPENSSL_VERSION_NUMBER >= 0x1000100F:
         return
 
     try:
diff --git a/src/pip/_internal/utils/logging.py b/src/pip/_internal/utils/logging.py
index d1d46ab70..45798d54f 100644
--- a/src/pip/_internal/utils/logging.py
+++ b/src/pip/_internal/utils/logging.py
@@ -1,6 +1,3 @@
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
 import contextlib
 import errno
 import logging
@@ -8,7 +5,7 @@ import logging.handlers
 import os
 import sys
 from logging import Filter, getLogger
-from typing import Any
+from typing import IO, Any, Callable, Iterator, Optional, TextIO, Type, cast
 
 from pip._internal.utils.compat import WINDOWS
 from pip._internal.utils.deprecation import DEPRECATION_MSG_PREFIX
@@ -29,13 +26,14 @@ except Exception:
 
 
 _log_state = threading.local()
-subprocess_logger = getLogger('pip.subprocessor')
+subprocess_logger = getLogger("pip.subprocessor")
 
 
 class BrokenStdoutLoggingError(Exception):
     """
     Raised if BrokenPipeError occurs for the stdout stream while logging.
     """
+
     pass
 
 
@@ -45,13 +43,17 @@ if WINDOWS:
     # https://bugs.python.org/issue19612
     # https://bugs.python.org/issue30418
     def _is_broken_pipe_error(exc_class, exc):
+        # type: (Type[BaseException], BaseException) -> bool
         """See the docstring for non-Windows below."""
-        return ((exc_class is BrokenPipeError) or
-                (exc_class is OSError and
-                 exc.errno in (errno.EINVAL, errno.EPIPE)))
+        return (exc_class is BrokenPipeError) or (
+            isinstance(exc, OSError) and exc.errno in (errno.EINVAL, errno.EPIPE)
+        )
+
+
 else:
     # Then we are in the non-Windows case.
     def _is_broken_pipe_error(exc_class, exc):
+        # type: (Type[BaseException], BaseException) -> bool
         """
         Return whether an exception is a broken pipe error.
 
@@ -59,11 +61,12 @@ else:
           exc_class: an exception class.
           exc: an exception instance.
         """
-        return (exc_class is BrokenPipeError)
+        return exc_class is BrokenPipeError
 
 
 @contextlib.contextmanager
 def indent_log(num=2):
+    # type: (int) -> Iterator[None]
     """
     A context manager which will cause the log output to be indented for any
     log messages emitted inside it.
@@ -78,7 +81,8 @@ def indent_log(num=2):
 
 
 def get_indentation():
-    return getattr(_log_state, 'indentation', 0)
+    # type: () -> int
+    return getattr(_log_state, "indentation", 0)
 
 
 class IndentingFormatter(logging.Formatter):
@@ -101,22 +105,24 @@ class IndentingFormatter(logging.Formatter):
         super().__init__(*args, **kwargs)
 
     def get_message_start(self, formatted, levelno):
+        # type: (str, int) -> str
         """
         Return the start of the formatted log message (not counting the
         prefix to add to each line).
         """
         if levelno < logging.WARNING:
-            return ''
+            return ""
         if formatted.startswith(DEPRECATION_MSG_PREFIX):
             # Then the message already has a prefix.  We don't want it to
             # look like "WARNING: DEPRECATION: ...."
-            return ''
+            return ""
         if levelno < logging.ERROR:
-            return 'WARNING: '
+            return "WARNING: "
 
-        return 'ERROR: '
+        return "ERROR: "
 
     def format(self, record):
+        # type: (logging.LogRecord) -> str
         """
         Calls the standard formatter, but will indent all of the log message
         lines by our current indentation level.
@@ -125,20 +131,20 @@ class IndentingFormatter(logging.Formatter):
         message_start = self.get_message_start(formatted, record.levelno)
         formatted = message_start + formatted
 
-        prefix = ''
+        prefix = ""
         if self.add_timestamp:
             prefix = f"{self.formatTime(record)} "
         prefix += " " * get_indentation()
-        formatted = "".join([
-            prefix + line
-            for line in formatted.splitlines(True)
-        ])
+        formatted = "".join([prefix + line for line in formatted.splitlines(True)])
         return formatted
 
 
 def _color_wrap(*colors):
+    # type: (*str) -> Callable[[str], str]
     def wrapped(inp):
+        # type: (str) -> str
         return "".join(list(colors) + [inp, colorama.Style.RESET_ALL])
+
     return wrapped
 
 
@@ -155,6 +161,7 @@ class ColorizedStreamHandler(logging.StreamHandler):
         COLORS = []
 
     def __init__(self, stream=None, no_color=None):
+        # type: (Optional[TextIO], bool) -> None
         super().__init__(stream)
         self._no_color = no_color
 
@@ -162,22 +169,26 @@ class ColorizedStreamHandler(logging.StreamHandler):
             self.stream = colorama.AnsiToWin32(self.stream)
 
     def _using_stdout(self):
+        # type: () -> bool
         """
         Return whether the handler is using sys.stdout.
         """
         if WINDOWS and colorama:
             # Then self.stream is an AnsiToWin32 object.
-            return self.stream.wrapped is sys.stdout
+            stream = cast(colorama.AnsiToWin32, self.stream)
+            return stream.wrapped is sys.stdout
 
         return self.stream is sys.stdout
 
     def should_color(self):
+        # type: () -> bool
         # Don't colorize things if we do not have colorama or if told not to
         if not colorama or self._no_color:
             return False
 
         real_stream = (
-            self.stream if not isinstance(self.stream, colorama.AnsiToWin32)
+            self.stream
+            if not isinstance(self.stream, colorama.AnsiToWin32)
             else self.stream.wrapped
         )
 
@@ -193,7 +204,8 @@ class ColorizedStreamHandler(logging.StreamHandler):
         return False
 
     def format(self, record):
-        msg = logging.StreamHandler.format(self, record)
+        # type: (logging.LogRecord) -> str
+        msg = super().format(record)
 
         if self.should_color():
             for level, color in self.COLORS:
@@ -205,31 +217,37 @@ class ColorizedStreamHandler(logging.StreamHandler):
 
     # The logging module says handleError() can be customized.
     def handleError(self, record):
+        # type: (logging.LogRecord) -> None
         exc_class, exc = sys.exc_info()[:2]
         # If a broken pipe occurred while calling write() or flush() on the
         # stdout stream in logging's Handler.emit(), then raise our special
         # exception so we can handle it in main() instead of logging the
         # broken pipe error and continuing.
-        if (exc_class and self._using_stdout() and
-                _is_broken_pipe_error(exc_class, exc)):
+        if (
+            exc_class
+            and exc
+            and self._using_stdout()
+            and _is_broken_pipe_error(exc_class, exc)
+        ):
             raise BrokenStdoutLoggingError()
 
         return super().handleError(record)
 
 
 class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler):
-
     def _open(self):
+        # type: () -> IO[Any]
         ensure_dir(os.path.dirname(self.baseFilename))
-        return logging.handlers.RotatingFileHandler._open(self)
+        return super()._open()
 
 
 class MaxLevelFilter(Filter):
-
     def __init__(self, level):
+        # type: (int) -> None
         self.level = level
 
     def filter(self, record):
+        # type: (logging.LogRecord) -> bool
         return record.levelno < self.level
 
 
@@ -240,12 +258,14 @@ class ExcludeLoggerFilter(Filter):
     """
 
     def filter(self, record):
+        # type: (logging.LogRecord) -> bool
         # The base Filter class allows only records from a logger (or its
         # children).
         return not super().filter(record)
 
 
 def setup_logging(verbosity, no_color, user_log_file):
+    # type: (int, bool, Optional[str]) -> int
     """Configures and sets up all of the logging
 
     Returns the requested logging level, as its integer value.
@@ -292,78 +312,76 @@ def setup_logging(verbosity, no_color, user_log_file):
         ["user_log"] if include_user_log else []
     )
 
-    logging.config.dictConfig({
-        "version": 1,
-        "disable_existing_loggers": False,
-        "filters": {
-            "exclude_warnings": {
-                "()": "pip._internal.utils.logging.MaxLevelFilter",
-                "level": logging.WARNING,
+    logging.config.dictConfig(
+        {
+            "version": 1,
+            "disable_existing_loggers": False,
+            "filters": {
+                "exclude_warnings": {
+                    "()": "pip._internal.utils.logging.MaxLevelFilter",
+                    "level": logging.WARNING,
+                },
+                "restrict_to_subprocess": {
+                    "()": "logging.Filter",
+                    "name": subprocess_logger.name,
+                },
+                "exclude_subprocess": {
+                    "()": "pip._internal.utils.logging.ExcludeLoggerFilter",
+                    "name": subprocess_logger.name,
+                },
             },
-            "restrict_to_subprocess": {
-                "()": "logging.Filter",
-                "name": subprocess_logger.name,
+            "formatters": {
+                "indent": {
+                    "()": IndentingFormatter,
+                    "format": "%(message)s",
+                },
+                "indent_with_timestamp": {
+                    "()": IndentingFormatter,
+                    "format": "%(message)s",
+                    "add_timestamp": True,
+                },
             },
-            "exclude_subprocess": {
-                "()": "pip._internal.utils.logging.ExcludeLoggerFilter",
-                "name": subprocess_logger.name,
+            "handlers": {
+                "console": {
+                    "level": level,
+                    "class": handler_classes["stream"],
+                    "no_color": no_color,
+                    "stream": log_streams["stdout"],
+                    "filters": ["exclude_subprocess", "exclude_warnings"],
+                    "formatter": "indent",
+                },
+                "console_errors": {
+                    "level": "WARNING",
+                    "class": handler_classes["stream"],
+                    "no_color": no_color,
+                    "stream": log_streams["stderr"],
+                    "filters": ["exclude_subprocess"],
+                    "formatter": "indent",
+                },
+                # A handler responsible for logging to the console messages
+                # from the "subprocessor" logger.
+                "console_subprocess": {
+                    "level": level,
+                    "class": handler_classes["stream"],
+                    "no_color": no_color,
+                    "stream": log_streams["stderr"],
+                    "filters": ["restrict_to_subprocess"],
+                    "formatter": "indent",
+                },
+                "user_log": {
+                    "level": "DEBUG",
+                    "class": handler_classes["file"],
+                    "filename": additional_log_file,
+                    "delay": True,
+                    "formatter": "indent_with_timestamp",
+                },
             },
-        },
-        "formatters": {
-            "indent": {
-                "()": IndentingFormatter,
-                "format": "%(message)s",
+            "root": {
+                "level": root_level,
+                "handlers": handlers,
             },
-            "indent_with_timestamp": {
-                "()": IndentingFormatter,
-                "format": "%(message)s",
-                "add_timestamp": True,
-            },
-        },
-        "handlers": {
-            "console": {
-                "level": level,
-                "class": handler_classes["stream"],
-                "no_color": no_color,
-                "stream": log_streams["stdout"],
-                "filters": ["exclude_subprocess", "exclude_warnings"],
-                "formatter": "indent",
-            },
-            "console_errors": {
-                "level": "WARNING",
-                "class": handler_classes["stream"],
-                "no_color": no_color,
-                "stream": log_streams["stderr"],
-                "filters": ["exclude_subprocess"],
-                "formatter": "indent",
-            },
-            # A handler responsible for logging to the console messages
-            # from the "subprocessor" logger.
-            "console_subprocess": {
-                "level": level,
-                "class": handler_classes["stream"],
-                "no_color": no_color,
-                "stream": log_streams["stderr"],
-                "filters": ["restrict_to_subprocess"],
-                "formatter": "indent",
-            },
-            "user_log": {
-                "level": "DEBUG",
-                "class": handler_classes["file"],
-                "filename": additional_log_file,
-                "delay": True,
-                "formatter": "indent_with_timestamp",
-            },
-        },
-        "root": {
-            "level": root_level,
-            "handlers": handlers,
-        },
-        "loggers": {
-            "pip._vendor": {
-                "level": vendored_log_level
-            }
-        },
-    })
+            "loggers": {"pip._vendor": {"level": vendored_log_level}},
+        }
+    )
 
     return level_number
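
As a simplified sketch (not part of the patch) of the console routing configured above: MaxLevelFilter keeps sub-WARNING records on stdout while WARNING and above go to the stderr handler; the real configuration additionally handles colour, the user log file, and the subprocess logger:

    import logging
    import sys

    from pip._internal.utils.logging import MaxLevelFilter

    stdout_handler = logging.StreamHandler(sys.stdout)
    stdout_handler.addFilter(MaxLevelFilter(logging.WARNING))  # only records below WARNING

    stderr_handler = logging.StreamHandler(sys.stderr)
    stderr_handler.setLevel(logging.WARNING)  # WARNING and above

    demo = logging.getLogger("demo")
    demo.setLevel(logging.DEBUG)
    demo.addHandler(stdout_handler)
    demo.addHandler(stderr_handler)

    demo.info("routed to stdout")
    demo.warning("routed to stderr")
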
diff --git a/src/pip/_internal/utils/misc.py b/src/pip/_internal/utils/misc.py
index 3e5f1753e..26037dbdc 100644
--- a/src/pip/_internal/utils/misc.py
+++ b/src/pip/_internal/utils/misc.py
@@ -1,6 +1,5 @@
 # The following comment should be removed at some point in the future.
 # mypy: strict-optional=False
-# mypy: disallow-untyped-defs=False
 
 import contextlib
 import errno
@@ -16,16 +15,21 @@ import sys
 import urllib.parse
 from io import StringIO
 from itertools import filterfalse, tee, zip_longest
+from types import TracebackType
 from typing import (
     Any,
     AnyStr,
+    BinaryIO,
     Callable,
     Container,
+    ContextManager,
     Iterable,
     Iterator,
     List,
     Optional,
+    TextIO,
     Tuple,
+    Type,
     TypeVar,
     cast,
 )
@@ -42,19 +46,29 @@ from pip._internal.utils.virtualenv import (
     virtualenv_no_global,
 )
 
-__all__ = ['rmtree', 'display_path', 'backup_dir',
-           'ask', 'splitext',
-           'format_size', 'is_installable_dir',
-           'normalize_path',
-           'renames', 'get_prog',
-           'captured_stdout', 'ensure_dir',
-           'remove_auth_from_url']
+__all__ = [
+    "rmtree",
+    "display_path",
+    "backup_dir",
+    "ask",
+    "splitext",
+    "format_size",
+    "is_installable_dir",
+    "normalize_path",
+    "renames",
+    "get_prog",
+    "captured_stdout",
+    "ensure_dir",
+    "remove_auth_from_url",
+]
 
 
 logger = logging.getLogger(__name__)
 
-VersionInfo = Tuple[int, int, int]
 T = TypeVar("T")
+ExcInfo = Tuple[Type[BaseException], BaseException, TracebackType]
+VersionInfo = Tuple[int, int, int]
+NetlocTuple = Tuple[str, Tuple[Optional[str], Optional[str]]]
 
 
 def get_pip_version():
@@ -62,10 +76,10 @@ def get_pip_version():
     pip_pkg_dir = os.path.join(os.path.dirname(__file__), "..", "..")
     pip_pkg_dir = os.path.abspath(pip_pkg_dir)
 
-    return (
-        'pip {} from {} (python {})'.format(
-            __version__, pip_pkg_dir, get_major_minor_version(),
-        )
+    return "pip {} from {} (python {})".format(
+        __version__,
+        pip_pkg_dir,
+        get_major_minor_version(),
     )
 
 
@@ -86,7 +100,7 @@ def normalize_version_info(py_version_info):
     elif len(py_version_info) > 3:
         py_version_info = py_version_info[:3]
 
-    return cast('VersionInfo', py_version_info)
+    return cast("VersionInfo", py_version_info)
 
 
 def ensure_dir(path):
@@ -104,13 +118,13 @@ def get_prog():
     # type: () -> str
     try:
         prog = os.path.basename(sys.argv[0])
-        if prog in ('__main__.py', '-c'):
+        if prog in ("__main__.py", "-c"):
             return f"{sys.executable} -m pip"
         else:
             return prog
     except (AttributeError, TypeError, IndexError):
         pass
-    return 'pip'
+    return "pip"
 
 
 # Retry every half second for up to 3 seconds
@@ -118,11 +132,11 @@ def get_prog():
 @retry(reraise=True, stop=stop_after_delay(3), wait=wait_fixed(0.5))
 def rmtree(dir, ignore_errors=False):
     # type: (AnyStr, bool) -> None
-    shutil.rmtree(dir, ignore_errors=ignore_errors,
-                  onerror=rmtree_errorhandler)
+    shutil.rmtree(dir, ignore_errors=ignore_errors, onerror=rmtree_errorhandler)
 
 
 def rmtree_errorhandler(func, path, exc_info):
+    # type: (Callable[..., Any], str, ExcInfo) -> None
     """On Windows, the files in .svn are read-only, so when rmtree() tries to
     remove them, an exception is thrown.  We catch that here, remove the
     read-only attribute, and hopefully continue without problems."""
@@ -148,11 +162,11 @@ def display_path(path):
     if possible."""
     path = os.path.normcase(os.path.abspath(path))
     if path.startswith(os.getcwd() + os.path.sep):
-        path = '.' + path[len(os.getcwd()):]
+        path = "." + path[len(os.getcwd()) :]
     return path
 
 
-def backup_dir(dir, ext='.bak'):
+def backup_dir(dir, ext=".bak"):
     # type: (str, str) -> str
     """Figure out the name of a directory to back up the given dir to
     (adding .bak, .bak2, etc)"""
@@ -166,7 +180,7 @@ def backup_dir(dir, ext='.bak'):
 
 def ask_path_exists(message, options):
     # type: (str, Iterable[str]) -> str
-    for action in os.environ.get('PIP_EXISTS_ACTION', '').split():
+    for action in os.environ.get("PIP_EXISTS_ACTION", "").split():
         if action in options:
             return action
     return ask(message, options)
@@ -175,10 +189,9 @@ def ask_path_exists(message, options):
 def _check_no_input(message):
     # type: (str) -> None
     """Raise an error if no input is allowed."""
-    if os.environ.get('PIP_NO_INPUT'):
+    if os.environ.get("PIP_NO_INPUT"):
         raise Exception(
-            'No input was expected ($PIP_NO_INPUT set); question: {}'.format(
-                message)
+            f"No input was expected ($PIP_NO_INPUT set); question: {message}"
         )
 
 
@@ -191,8 +204,8 @@ def ask(message, options):
         response = response.strip().lower()
         if response not in options:
             print(
-                'Your response ({!r}) was not one of the expected responses: '
-                '{}'.format(response, ', '.join(options))
+                "Your response ({!r}) was not one of the expected responses: "
+                "{}".format(response, ", ".join(options))
             )
         else:
             return response
@@ -221,24 +234,24 @@ def strtobool(val):
     'val' is anything else.
     """
     val = val.lower()
-    if val in ('y', 'yes', 't', 'true', 'on', '1'):
+    if val in ("y", "yes", "t", "true", "on", "1"):
         return 1
-    elif val in ('n', 'no', 'f', 'false', 'off', '0'):
+    elif val in ("n", "no", "f", "false", "off", "0"):
         return 0
     else:
-        raise ValueError("invalid truth value %r" % (val,))
+        raise ValueError(f"invalid truth value {val!r}")
 
 
 def format_size(bytes):
     # type: (float) -> str
     if bytes > 1000 * 1000:
-        return '{:.1f} MB'.format(bytes / 1000.0 / 1000)
+        return "{:.1f} MB".format(bytes / 1000.0 / 1000)
     elif bytes > 10 * 1000:
-        return '{} kB'.format(int(bytes / 1000))
+        return "{} kB".format(int(bytes / 1000))
     elif bytes > 1000:
-        return '{:.1f} kB'.format(bytes / 1000.0)
+        return "{:.1f} kB".format(bytes / 1000.0)
     else:
-        return '{} bytes'.format(int(bytes))
+        return "{} bytes".format(int(bytes))
 
 
 def tabulate(rows):
@@ -251,27 +264,27 @@ def tabulate(rows):
     (['foobar     2000', '3735928559'], [10, 4])
     """
     rows = [tuple(map(str, row)) for row in rows]
-    sizes = [max(map(len, col)) for col in zip_longest(*rows, fillvalue='')]
+    sizes = [max(map(len, col)) for col in zip_longest(*rows, fillvalue="")]
     table = [" ".join(map(str.ljust, row, sizes)).rstrip() for row in rows]
     return table, sizes
 
 
 def is_installable_dir(path):
     # type: (str) -> bool
-    """Is path is a directory containing setup.py or pyproject.toml?
-    """
+    """Is path is a directory containing setup.py or pyproject.toml?"""
     if not os.path.isdir(path):
         return False
-    setup_py = os.path.join(path, 'setup.py')
+    setup_py = os.path.join(path, "setup.py")
     if os.path.isfile(setup_py):
         return True
-    pyproject_toml = os.path.join(path, 'pyproject.toml')
+    pyproject_toml = os.path.join(path, "pyproject.toml")
     if os.path.isfile(pyproject_toml):
         return True
     return False
 
 
 def read_chunks(file, size=io.DEFAULT_BUFFER_SIZE):
+    # type: (BinaryIO, int) -> Iterator[bytes]
     """Yield pieces of data from a file-like object until EOF."""
     while True:
         chunk = file.read(size)
@@ -298,7 +311,7 @@ def splitext(path):
     # type: (str) -> Tuple[str, str]
     """Like os.path.splitext, but take off .tar too"""
     base, ext = posixpath.splitext(path)
-    if base.lower().endswith('.tar'):
+    if base.lower().endswith(".tar"):
         ext = base[-4:] + ext
         base = base[:-4]
     return base, ext
@@ -372,19 +385,19 @@ def dist_is_editable(dist):
     Return True if given Distribution is an editable install.
     """
     for path_item in sys.path:
-        egg_link = os.path.join(path_item, dist.project_name + '.egg-link')
+        egg_link = os.path.join(path_item, dist.project_name + ".egg-link")
         if os.path.isfile(egg_link):
             return True
     return False
 
 
 def get_installed_distributions(
-        local_only=True,  # type: bool
-        skip=stdlib_pkgs,  # type: Container[str]
-        include_editables=True,  # type: bool
-        editables_only=False,  # type: bool
-        user_only=False,  # type: bool
-        paths=None  # type: Optional[List[str]]
+    local_only=True,  # type: bool
+    skip=stdlib_pkgs,  # type: Container[str]
+    include_editables=True,  # type: bool
+    editables_only=False,  # type: bool
+    user_only=False,  # type: bool
+    paths=None,  # type: Optional[List[str]]
 ):
     # type: (...) -> List[Distribution]
     """Return a list of installed Distribution objects.
@@ -419,6 +432,7 @@ def get_distribution(req_name):
     """
     from pip._internal.metadata import get_default_environment
     from pip._internal.metadata.pkg_resources import Distribution as _Dist
+
     dist = get_default_environment().get_distribution(req_name)
     if dist is None:
         return None
@@ -455,7 +469,7 @@ def egg_link_path(dist):
         sites.append(site_packages)
 
     for site in sites:
-        egglink = os.path.join(site, dist.project_name) + '.egg-link'
+        egglink = os.path.join(site, dist.project_name) + ".egg-link"
         if os.path.isfile(egglink):
             return egglink
     return None
@@ -483,20 +497,24 @@ def write_output(msg, *args):
 
 
 class StreamWrapper(StringIO):
+    orig_stream = None  # type: TextIO
 
     @classmethod
     def from_stream(cls, orig_stream):
+        # type: (TextIO) -> StreamWrapper
         cls.orig_stream = orig_stream
         return cls()
 
     # compileall.compile_dir() needs stdout.encoding to print to stdout
+    # https://github.com/python/mypy/issues/4125
     @property
-    def encoding(self):
+    def encoding(self):  # type: ignore
         return self.orig_stream.encoding
 
 
 @contextlib.contextmanager
 def captured_output(stream_name):
+    # type: (str) -> Iterator[StreamWrapper]
     """Return a context manager used by captured_stdout/stdin/stderr
     that temporarily replaces the sys stream *stream_name* with a StringIO.
 
@@ -511,6 +529,7 @@ def captured_output(stream_name):
 
 
 def captured_stdout():
+    # type: () -> ContextManager[StreamWrapper]
     """Capture the output of sys.stdout:
 
        with captured_stdout() as stdout:
@@ -519,22 +538,24 @@ def captured_stdout():
 
     Taken from Lib/support/__init__.py in the CPython repo.
     """
-    return captured_output('stdout')
+    return captured_output("stdout")
 
 
 def captured_stderr():
+    # type: () -> ContextManager[StreamWrapper]
     """
     See captured_stdout().
     """
-    return captured_output('stderr')
+    return captured_output("stderr")
 
 
 # Simulates an enum
 def enum(*sequential, **named):
+    # type: (*Any, **Any) -> Type[Any]
     enums = dict(zip(sequential, range(len(sequential))), **named)
     reverse = {value: key for key, value in enums.items()}
-    enums['reverse_mapping'] = reverse
-    return type('Enum', (), enums)
+    enums["reverse_mapping"] = reverse
+    return type("Enum", (), enums)
 
 
 def build_netloc(host, port):
@@ -544,21 +565,21 @@ def build_netloc(host, port):
     """
     if port is None:
         return host
-    if ':' in host:
+    if ":" in host:
         # Only wrap host with square brackets when it is IPv6
-        host = f'[{host}]'
-    return f'{host}:{port}'
+        host = f"[{host}]"
+    return f"{host}:{port}"
 
 
-def build_url_from_netloc(netloc, scheme='https'):
+def build_url_from_netloc(netloc, scheme="https"):
     # type: (str, str) -> str
     """
     Build a full URL from a netloc.
     """
-    if netloc.count(':') >= 2 and '@' not in netloc and '[' not in netloc:
+    if netloc.count(":") >= 2 and "@" not in netloc and "[" not in netloc:
         # It must be a bare IPv6 address, so wrap it with brackets.
-        netloc = f'[{netloc}]'
-    return f'{scheme}://{netloc}'
+        netloc = f"[{netloc}]"
+    return f"{scheme}://{netloc}"
 
 
 def parse_netloc(netloc):
@@ -572,31 +593,33 @@ def parse_netloc(netloc):
 
 
 def split_auth_from_netloc(netloc):
+    # type: (str) -> NetlocTuple
     """
     Parse out and remove the auth information from a netloc.
 
     Returns: (netloc, (username, password)).
     """
-    if '@' not in netloc:
+    if "@" not in netloc:
         return netloc, (None, None)
 
     # Split from the right because that's how urllib.parse.urlsplit()
     # behaves if more than one @ is present (which can be checked using
     # the password attribute of urlsplit()'s return value).
-    auth, netloc = netloc.rsplit('@', 1)
-    if ':' in auth:
+    auth, netloc = netloc.rsplit("@", 1)
+    pw = None  # type: Optional[str]
+    if ":" in auth:
         # Split from the left because that's how urllib.parse.urlsplit()
         # behaves if more than one : is present (which again can be checked
         # using the password attribute of the return value)
-        user_pass = auth.split(':', 1)
+        user, pw = auth.split(":", 1)
     else:
-        user_pass = auth, None
+        user, pw = auth, None
 
-    user_pass = tuple(
-        None if x is None else urllib.parse.unquote(x) for x in user_pass
-    )
+    user = urllib.parse.unquote(user)
+    if pw is not None:
+        pw = urllib.parse.unquote(pw)
 
-    return netloc, user_pass
+    return netloc, (user, pw)
 
 
 def redact_netloc(netloc):
@@ -612,17 +635,18 @@ def redact_netloc(netloc):
     if user is None:
         return netloc
     if password is None:
-        user = '****'
-        password = ''
+        user = "****"
+        password = ""
     else:
         user = urllib.parse.quote(user)
-        password = ':****'
-    return '{user}{password}@{netloc}'.format(user=user,
-                                              password=password,
-                                              netloc=netloc)
+        password = ":****"
+    return "{user}{password}@{netloc}".format(
+        user=user, password=password, netloc=netloc
+    )
 
 
 def _transform_url(url, transform_netloc):
+    # type: (str, Callable[[str], Tuple[Any, ...]]) -> Tuple[str, NetlocTuple]
     """Transform and replace netloc in a url.
 
     transform_netloc is a function taking the netloc and returning a
@@ -635,18 +659,18 @@ def _transform_url(url, transform_netloc):
     purl = urllib.parse.urlsplit(url)
     netloc_tuple = transform_netloc(purl.netloc)
     # stripped url
-    url_pieces = (
-        purl.scheme, netloc_tuple[0], purl.path, purl.query, purl.fragment
-    )
+    url_pieces = (purl.scheme, netloc_tuple[0], purl.path, purl.query, purl.fragment)
     surl = urllib.parse.urlunsplit(url_pieces)
-    return surl, netloc_tuple
+    return surl, cast("NetlocTuple", netloc_tuple)
 
 
 def _get_netloc(netloc):
+    # type: (str) -> NetlocTuple
     return split_auth_from_netloc(netloc)
 
 
 def _redact_netloc(netloc):
+    # type: (str) -> Tuple[str,]
     return (redact_netloc(netloc),)
 
 
@@ -678,7 +702,7 @@ def redact_auth_from_url(url):
 class HiddenText:
     def __init__(
         self,
-        secret,    # type: str
+        secret,  # type: str
         redacted,  # type: str
     ):
         # type: (...) -> None
@@ -687,7 +711,7 @@ class HiddenText:
 
     def __repr__(self):
         # type: (...) -> str
-        return '<HiddenText {!r}>'.format(str(self))
+        return "<HiddenText {!r}>".format(str(self))
 
     def __str__(self):
         # type: (...) -> str
@@ -701,12 +725,12 @@ class HiddenText:
 
         # The string being used for redaction doesn't also have to match,
         # just the raw, original string.
-        return (self.secret == other.secret)
+        return self.secret == other.secret
 
 
 def hide_value(value):
     # type: (str) -> HiddenText
-    return HiddenText(value, redacted='****')
+    return HiddenText(value, redacted="****")
 
 
 def hide_url(url):
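
How HiddenText is meant to be used, in a small sketch (assuming __str__ returns the redacted form, as in the surrounding code):

    from pip._internal.utils.misc import hide_value

    secret = hide_value("s3cret")
    assert secret.secret == "s3cret"  # raw value kept for actually running commands
    assert str(secret) == "****"      # logging only ever sees the redacted form
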
@@ -725,41 +749,36 @@ def protect_pip_from_modification_on_windows(modifying_pip):
     pip_names = [
         "pip.exe",
         "pip{}.exe".format(sys.version_info[0]),
-        "pip{}.{}.exe".format(*sys.version_info[:2])
+        "pip{}.{}.exe".format(*sys.version_info[:2]),
     ]
 
     # See https://github.com/pypa/pip/issues/1299 for more discussion
     should_show_use_python_msg = (
-        modifying_pip and
-        WINDOWS and
-        os.path.basename(sys.argv[0]) in pip_names
+        modifying_pip and WINDOWS and os.path.basename(sys.argv[0]) in pip_names
     )
 
     if should_show_use_python_msg:
-        new_command = [
-            sys.executable, "-m", "pip"
-        ] + sys.argv[1:]
+        new_command = [sys.executable, "-m", "pip"] + sys.argv[1:]
         raise CommandError(
-            'To modify pip, please run the following command:\n{}'
-            .format(" ".join(new_command))
+            "To modify pip, please run the following command:\n{}".format(
+                " ".join(new_command)
+            )
         )
 
 
 def is_console_interactive():
     # type: () -> bool
-    """Is this console interactive?
-    """
+    """Is this console interactive?"""
     return sys.stdin is not None and sys.stdin.isatty()
 
 
 def hash_file(path, blocksize=1 << 20):
     # type: (str, int) -> Tuple[Any, int]
-    """Return (hash, length) for path using hashlib.sha256()
-    """
+    """Return (hash, length) for path using hashlib.sha256()"""
 
     h = hashlib.sha256()
     length = 0
-    with open(path, 'rb') as f:
+    with open(path, "rb") as f:
         for block in read_chunks(f, size=blocksize):
             length += len(block)
             h.update(block)
@@ -767,6 +786,7 @@ def hash_file(path, blocksize=1 << 20):
 
 
 def is_wheel_installed():
+    # type: () -> bool
     """
     Return whether the wheel package is installed.
     """
diff --git a/src/pip/_internal/utils/models.py b/src/pip/_internal/utils/models.py
index c14e9ff92..0e02bc7a5 100644
--- a/src/pip/_internal/utils/models.py
+++ b/src/pip/_internal/utils/models.py
@@ -1,40 +1,46 @@
 """Utilities for defining models
 """
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
 
 import operator
+from typing import Any, Callable, Type
 
 
 class KeyBasedCompareMixin:
-    """Provides comparison capabilities that is based on a key
-    """
+    """Provides comparison capabilities that is based on a key"""
 
-    __slots__ = ['_compare_key', '_defining_class']
+    __slots__ = ["_compare_key", "_defining_class"]
 
     def __init__(self, key, defining_class):
+        # type: (Any, Type[KeyBasedCompareMixin]) -> None
         self._compare_key = key
         self._defining_class = defining_class
 
     def __hash__(self):
+        # type: () -> int
         return hash(self._compare_key)
 
     def __lt__(self, other):
+        # type: (Any) -> bool
         return self._compare(other, operator.__lt__)
 
     def __le__(self, other):
+        # type: (Any) -> bool
         return self._compare(other, operator.__le__)
 
     def __gt__(self, other):
+        # type: (Any) -> bool
         return self._compare(other, operator.__gt__)
 
     def __ge__(self, other):
+        # type: (Any) -> bool
         return self._compare(other, operator.__ge__)
 
     def __eq__(self, other):
+        # type: (Any) -> bool
         return self._compare(other, operator.__eq__)
 
     def _compare(self, other, method):
+        # type: (Any, Callable[[Any, Any], bool]) -> bool
         if not isinstance(other, self._defining_class):
             return NotImplemented
 
diff --git a/src/pip/_internal/utils/packaging.py b/src/pip/_internal/utils/packaging.py
index 1be31ea91..3f9dbd3b7 100644
--- a/src/pip/_internal/utils/packaging.py
+++ b/src/pip/_internal/utils/packaging.py
@@ -31,7 +31,7 @@ def check_requires_python(requires_python, version_info):
         return True
     requires_python_specifier = specifiers.SpecifierSet(requires_python)
 
-    python_version = version.parse('.'.join(map(str, version_info)))
+    python_version = version.parse(".".join(map(str, version_info)))
     return python_version in requires_python_specifier
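
A sketch of the check, assuming check_requires_python is imported as-is:

    from pip._internal.utils.packaging import check_requires_python

    assert check_requires_python(">=3.6", (3, 8, 0)) is True
    assert check_requires_python(">=3.9", (3, 8, 0)) is False
    assert check_requires_python(None, (3, 8, 0)) is True  # no constraint declared
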
 
 
@@ -41,16 +41,17 @@ def get_metadata(dist):
     :raises NoneMetadataError: if the distribution reports `has_metadata()`
         True but `get_metadata()` returns None.
     """
-    metadata_name = 'METADATA'
-    if (isinstance(dist, pkg_resources.DistInfoDistribution) and
-            dist.has_metadata(metadata_name)):
+    metadata_name = "METADATA"
+    if isinstance(dist, pkg_resources.DistInfoDistribution) and dist.has_metadata(
+        metadata_name
+    ):
         metadata = dist.get_metadata(metadata_name)
-    elif dist.has_metadata('PKG-INFO'):
-        metadata_name = 'PKG-INFO'
+    elif dist.has_metadata("PKG-INFO"):
+        metadata_name = "PKG-INFO"
         metadata = dist.get_metadata(metadata_name)
     else:
         logger.warning("No metadata found in %s", display_path(dist.location))
-        metadata = ''
+        metadata = ""
 
     if metadata is None:
         raise NoneMetadataError(dist, metadata_name)
@@ -69,7 +70,7 @@ def get_requires_python(dist):
     if not present.
     """
     pkg_info_dict = get_metadata(dist)
-    requires_python = pkg_info_dict.get('Requires-Python')
+    requires_python = pkg_info_dict.get("Requires-Python")
 
     if requires_python is not None:
         # Convert to a str to satisfy the type checker, since requires_python
@@ -81,8 +82,8 @@ def get_requires_python(dist):
 
 def get_installer(dist):
     # type: (Distribution) -> str
-    if dist.has_metadata('INSTALLER'):
-        for line in dist.get_metadata_lines('INSTALLER'):
+    if dist.has_metadata("INSTALLER"):
+        for line in dist.get_metadata_lines("INSTALLER"):
             if line.strip():
                 return line.strip()
-    return ''
+    return ""
diff --git a/src/pip/_internal/utils/parallel.py b/src/pip/_internal/utils/parallel.py
index 6b0919f14..de91dc8ab 100644
--- a/src/pip/_internal/utils/parallel.py
+++ b/src/pip/_internal/utils/parallel.py
@@ -16,7 +16,7 @@ These helpers work like Python 3's map, with two differences:
   than using the default value of 1.
 """
 
-__all__ = ['map_multiprocess', 'map_multithread']
+__all__ = ["map_multiprocess", "map_multithread"]
 
 from contextlib import contextmanager
 from multiprocessing import Pool as ProcessPool
@@ -27,8 +27,8 @@ from typing import Callable, Iterable, Iterator, TypeVar, Union
 from pip._vendor.requests.adapters import DEFAULT_POOLSIZE
 
 Pool = Union[pool.Pool, pool.ThreadPool]
-S = TypeVar('S')
-T = TypeVar('T')
+S = TypeVar("S")
+T = TypeVar("T")
 
 # On platforms without sem_open, multiprocessing[.dummy] Pool
 # cannot be created.
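
A usage sketch for the thread-based helper; results may arrive lazily and out of order, hence the sort:

    from pip._internal.utils.parallel import map_multithread

    squares = sorted(map_multithread(lambda x: x * x, range(5)))
    assert squares == [0, 1, 4, 9, 16]
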
diff --git a/src/pip/_internal/utils/pkg_resources.py b/src/pip/_internal/utils/pkg_resources.py
index 8c4974a70..ee1eca300 100644
--- a/src/pip/_internal/utils/pkg_resources.py
+++ b/src/pip/_internal/utils/pkg_resources.py
@@ -4,8 +4,8 @@ from pip._vendor.pkg_resources import yield_lines
 
 
 class DictMetadata:
-    """IMetadataProvider that reads metadata files from a dictionary.
-    """
+    """IMetadataProvider that reads metadata files from a dictionary."""
+
     def __init__(self, metadata):
         # type: (Dict[str, bytes]) -> None
         self._metadata = metadata
diff --git a/src/pip/_internal/utils/setuptools_build.py b/src/pip/_internal/utils/setuptools_build.py
index 7d91f6f26..4b8e4b359 100644
--- a/src/pip/_internal/utils/setuptools_build.py
+++ b/src/pip/_internal/utils/setuptools_build.py
@@ -8,9 +8,11 @@ from typing import List, Optional, Sequence
 # invoking via the shim.  This avoids e.g. the following manifest_maker
 # warning: "warning: manifest_maker: standard file '-c' not found".
 _SETUPTOOLS_SHIM = (
-    "import sys, setuptools, tokenize; sys.argv[0] = {0!r}; __file__={0!r};"
-    "f=getattr(tokenize, 'open', open)(__file__);"
-    "code=f.read().replace('\\r\\n', '\\n');"
+    "import io, os, sys, setuptools, tokenize; sys.argv[0] = {0!r}; __file__={0!r};"
+    "f = getattr(tokenize, 'open', open)(__file__) "
+    "if os.path.exists(__file__) "
+    "else io.StringIO('from setuptools import setup; setup()');"
+    "code = f.read().replace('\\r\\n', '\\n');"
     "f.close();"
     "exec(compile(code, __file__, 'exec'))"
 )
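
The shim above is what pip hands to `python -c` when driving setup.py; a sketch of the arguments it produces (the path is illustrative):

    from pip._internal.utils.setuptools_build import make_setuptools_shim_args

    args = make_setuptools_shim_args(
        "/tmp/pkg/setup.py", no_user_config=True, unbuffered_output=True
    )
    # Roughly: ["-u", "-c", "<the shim, with __file__ bound to /tmp/pkg/setup.py>", "--no-user-cfg"]
    assert args[0] == "-u" and args[1] == "-c"
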
@@ -20,7 +22,7 @@ def make_setuptools_shim_args(
     setup_py_path,  # type: str
     global_options=None,  # type: Sequence[str]
     no_user_config=False,  # type: bool
-    unbuffered_output=False  # type: bool
+    unbuffered_output=False,  # type: bool
 ):
     # type: (...) -> List[str]
     """
@@ -55,9 +57,7 @@ def make_setuptools_bdist_wheel_args(
     # relies on site.py to find parts of the standard library outside the
     # virtualenv.
     args = make_setuptools_shim_args(
-        setup_py_path,
-        global_options=global_options,
-        unbuffered_output=True
+        setup_py_path, global_options=global_options, unbuffered_output=True
     )
     args += ["bdist_wheel", "-d", destination_dir]
     args += build_options
@@ -70,9 +70,7 @@ def make_setuptools_clean_args(
 ):
     # type: (...) -> List[str]
     args = make_setuptools_shim_args(
-        setup_py_path,
-        global_options=global_options,
-        unbuffered_output=True
+        setup_py_path, global_options=global_options, unbuffered_output=True
     )
     args += ["clean", "--all"]
     return args
@@ -103,7 +101,7 @@ def make_setuptools_develop_args(
     if prefix:
         args += ["--prefix", prefix]
     if home is not None:
-        args += ["--home", home]
+        args += ["--install-dir", home]
 
     if use_user_site:
         args += ["--user", "--prefix="]
@@ -117,9 +115,7 @@ def make_setuptools_egg_info_args(
     no_user_config,  # type: bool
 ):
     # type: (...) -> List[str]
-    args = make_setuptools_shim_args(
-        setup_py_path, no_user_config=no_user_config
-    )
+    args = make_setuptools_shim_args(setup_py_path, no_user_config=no_user_config)
 
     args += ["egg_info"]
 
@@ -140,7 +136,7 @@ def make_setuptools_install_args(
     home,  # type: Optional[str]
     use_user_site,  # type: bool
     no_user_config,  # type: bool
-    pycompile  # type: bool
+    pycompile,  # type: bool
 ):
     # type: (...) -> List[str]
     assert not (use_user_site and prefix)
@@ -150,7 +146,7 @@ def make_setuptools_install_args(
         setup_py_path,
         global_options=global_options,
         no_user_config=no_user_config,
-        unbuffered_output=True
+        unbuffered_output=True,
     )
     args += ["install", "--record", record_filename]
     args += ["--single-version-externally-managed"]
diff --git a/src/pip/_internal/utils/subprocess.py b/src/pip/_internal/utils/subprocess.py
index cec7896bf..2c8cf2123 100644
--- a/src/pip/_internal/utils/subprocess.py
+++ b/src/pip/_internal/utils/subprocess.py
@@ -6,14 +6,13 @@ from typing import Any, Callable, Iterable, List, Mapping, Optional, Union
 
 from pip._internal.cli.spinners import SpinnerInterface, open_spinner
 from pip._internal.exceptions import InstallationSubprocessError
-from pip._internal.utils.compat import console_to_str, str_to_display
 from pip._internal.utils.logging import subprocess_logger
 from pip._internal.utils.misc import HiddenText
 
 CommandArgs = List[Union[str, HiddenText]]
 
 
-LOG_DIVIDER = '----------------------------------------'
+LOG_DIVIDER = "----------------------------------------"
 
 
 def make_command(*args):
@@ -44,9 +43,9 @@ def format_command_args(args):
     # this can trigger a UnicodeDecodeError in Python 2 if the argument
     # has type unicode and includes a non-ascii character.  (The type
     # checker doesn't ensure the annotations are correct in all cases.)
-    return ' '.join(
-        shlex.quote(str(arg)) if isinstance(arg, HiddenText)
-        else shlex.quote(arg) for arg in args
+    return " ".join(
+        shlex.quote(str(arg)) if isinstance(arg, HiddenText) else shlex.quote(arg)
+        for arg in args
     )
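
What the redaction buys us in practice, sketched with an invented command:

    from pip._internal.utils.misc import HiddenText
    from pip._internal.utils.subprocess import format_command_args

    cmd = ["git", "clone", HiddenText("https://user:pw@host/repo", redacted="****")]
    assert format_command_args(cmd) == "git clone '****'"
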
 
 
@@ -55,15 +54,13 @@ def reveal_command_args(args):
     """
     Return the arguments in their raw, unredacted form.
     """
-    return [
-        arg.secret if isinstance(arg, HiddenText) else arg for arg in args
-    ]
+    return [arg.secret if isinstance(arg, HiddenText) else arg for arg in args]
 
 
 def make_subprocess_output_error(
-    cmd_args,     # type: Union[List[str], CommandArgs]
-    cwd,          # type: Optional[str]
-    lines,        # type: List[str]
+    cmd_args,  # type: Union[List[str], CommandArgs]
+    cwd,  # type: Optional[str]
+    lines,  # type: List[str]
     exit_status,  # type: int
 ):
     # type: (...) -> str
@@ -74,25 +71,20 @@ def make_subprocess_output_error(
     :param lines: A list of lines, each ending with a newline.
     """
     command = format_command_args(cmd_args)
-    # Convert `command` and `cwd` to text (unicode in Python 2) so we can use
-    # them as arguments in the unicode format string below. This avoids
-    # "UnicodeDecodeError: 'ascii' codec can't decode byte ..." in Python 2
-    # if either contains a non-ascii character.
-    command_display = str_to_display(command, desc='command bytes')
 
     # We know the joined output value ends in a newline.
-    output = ''.join(lines)
+    output = "".join(lines)
     msg = (
         # Use a unicode string to avoid "UnicodeEncodeError: 'ascii'
         # codec can't encode character ..." in Python 2 when a format
         # argument (e.g. `output`) has a non-ascii character.
-        'Command errored out with exit status {exit_status}:\n'
-        ' command: {command_display}\n'
-        '     cwd: {cwd_display}\n'
-        'Complete output ({line_count} lines):\n{output}{divider}'
+        "Command errored out with exit status {exit_status}:\n"
+        " command: {command_display}\n"
+        "     cwd: {cwd_display}\n"
+        "Complete output ({line_count} lines):\n{output}{divider}"
     ).format(
         exit_status=exit_status,
-        command_display=command_display,
+        command_display=command,
         cwd_display=cwd,
         line_count=len(lines),
         output=output,
@@ -105,7 +97,7 @@ def call_subprocess(
     cmd,  # type: Union[List[str], CommandArgs]
     show_stdout=False,  # type: bool
     cwd=None,  # type: Optional[str]
-    on_returncode='raise',  # type: str
+    on_returncode="raise",  # type: str
     extra_ok_returncodes=None,  # type: Optional[Iterable[int]]
     command_desc=None,  # type: Optional[str]
     extra_environ=None,  # type: Optional[Mapping[str, Any]]
@@ -182,11 +174,14 @@ def call_subprocess(
             stderr=subprocess.STDOUT if not stdout_only else subprocess.PIPE,
             cwd=cwd,
             env=env,
+            errors="backslashreplace",
         )
     except Exception as exc:
         if log_failed_cmd:
             subprocess_logger.critical(
-                "Error %s while executing command %s", exc, command_desc,
+                "Error %s while executing command %s",
+                exc,
+                command_desc,
             )
         raise
     all_output = []
@@ -196,12 +191,11 @@ def call_subprocess(
         proc.stdin.close()
         # In this mode, stdout and stderr are in the same pipe.
         while True:
-            # The "line" value is a unicode string in Python 2.
-            line = console_to_str(proc.stdout.readline())
+            line = proc.stdout.readline()  # type: str
             if not line:
                 break
             line = line.rstrip()
-            all_output.append(line + '\n')
+            all_output.append(line + "\n")
 
             # Show the line immediately.
             log_subprocess(line)
@@ -214,25 +208,21 @@ def call_subprocess(
         finally:
             if proc.stdout:
                 proc.stdout.close()
-        output = ''.join(all_output)
+        output = "".join(all_output)
     else:
         # In this mode, stdout and stderr are in different pipes.
         # We must use communicate() which is the only safe way to read both.
-        out_bytes, err_bytes = proc.communicate()
+        out, err = proc.communicate()
         # log line by line to preserve pip log indenting
-        out = console_to_str(out_bytes)
         for out_line in out.splitlines():
             log_subprocess(out_line)
         all_output.append(out)
-        err = console_to_str(err_bytes)
         for err_line in err.splitlines():
             log_subprocess(err_line)
         all_output.append(err)
         output = out
 
-    proc_had_error = (
-        proc.returncode and proc.returncode not in extra_ok_returncodes
-    )
+    proc_had_error = proc.returncode and proc.returncode not in extra_ok_returncodes
     if use_spinner:
         assert spinner
         if proc_had_error:
@@ -240,7 +230,7 @@ def call_subprocess(
         else:
             spinner.finish("done")
     if proc_had_error:
-        if on_returncode == 'raise':
+        if on_returncode == "raise":
             if not showing_subprocess and log_failed_cmd:
                 # Then the subprocess streams haven't been logged to the
                 # console yet.
@@ -252,18 +242,17 @@ def call_subprocess(
                 )
                 subprocess_logger.error(msg)
             raise InstallationSubprocessError(proc.returncode, command_desc)
-        elif on_returncode == 'warn':
+        elif on_returncode == "warn":
             subprocess_logger.warning(
                 'Command "%s" had error code %s in %s',
                 command_desc,
                 proc.returncode,
                 cwd,
             )
-        elif on_returncode == 'ignore':
+        elif on_returncode == "ignore":
             pass
         else:
-            raise ValueError('Invalid value: on_returncode={!r}'.format(
-                             on_returncode))
+            raise ValueError(f"Invalid value: on_returncode={on_returncode!r}")
     return output
 
 
@@ -278,7 +267,7 @@ def runner_with_spinner_message(message):
     def runner(
         cmd,  # type: List[str]
         cwd=None,  # type: Optional[str]
-        extra_environ=None  # type: Optional[Mapping[str, Any]]
+        extra_environ=None,  # type: Optional[Mapping[str, Any]]
     ):
         # type: (...) -> None
         with open_spinner(message) as spinner:
diff --git a/src/pip/_internal/utils/temp_dir.py b/src/pip/_internal/utils/temp_dir.py
index 8c4aaba3a..477cbe6b1 100644
--- a/src/pip/_internal/utils/temp_dir.py
+++ b/src/pip/_internal/utils/temp_dir.py
@@ -10,7 +10,7 @@ from pip._internal.utils.misc import enum, rmtree
 
 logger = logging.getLogger(__name__)
 
-_T = TypeVar('_T', bound='TempDirectory')
+_T = TypeVar("_T", bound="TempDirectory")
 
 
 # Kinds of temporary directories. Only needed for ones that are
@@ -38,8 +38,7 @@ def global_tempdir_manager():
 
 
 class TempDirectoryTypeRegistry:
-    """Manages temp directory behavior
-    """
+    """Manages temp directory behavior"""
 
     def __init__(self):
         # type: () -> None
@@ -108,7 +107,7 @@ class TempDirectory:
 
     def __init__(
         self,
-        path=None,    # type: Optional[str]
+        path=None,  # type: Optional[str]
         delete=_default,  # type: Union[bool, None, _Default]
         kind="temp",  # type: str
         globally_managed=False,  # type: bool
@@ -142,9 +141,7 @@ class TempDirectory:
     @property
     def path(self):
         # type: () -> str
-        assert not self._deleted, (
-            f"Attempted to access deleted path: {self._path}"
-        )
+        assert not self._deleted, f"Attempted to access deleted path: {self._path}"
         return self._path
 
     def __repr__(self):
@@ -169,22 +166,18 @@ class TempDirectory:
 
     def _create(self, kind):
         # type: (str) -> str
-        """Create a temporary directory and store its path in self.path
-        """
+        """Create a temporary directory and store its path in self.path"""
         # We realpath here because some systems have their default tmpdir
         # symlinked to another directory.  This tends to confuse build
         # scripts, so we canonicalize the path by traversing potential
         # symlinks here.
-        path = os.path.realpath(
-            tempfile.mkdtemp(prefix=f"pip-{kind}-")
-        )
+        path = os.path.realpath(tempfile.mkdtemp(prefix=f"pip-{kind}-"))
         logger.debug("Created temporary directory: %s", path)
         return path
 
     def cleanup(self):
         # type: () -> None
-        """Remove the temporary directory created and reset state
-        """
+        """Remove the temporary directory created and reset state"""
         self._deleted = True
         if not os.path.exists(self._path):
             return
@@ -205,6 +198,7 @@ class AdjacentTempDirectory(TempDirectory):
             (when used as a contextmanager)
 
     """
+
     # The characters that may be used to name the temp directory
     # We always prepend a ~ and then rotate through these until
     # a usable name is found.
@@ -214,7 +208,7 @@ class AdjacentTempDirectory(TempDirectory):
 
     def __init__(self, original, delete=None):
         # type: (str, Optional[bool]) -> None
-        self.original = original.rstrip('/\\')
+        self.original = original.rstrip("/\\")
         super().__init__(delete=delete)
 
     @classmethod
@@ -229,16 +223,18 @@ class AdjacentTempDirectory(TempDirectory):
         """
         for i in range(1, len(name)):
             for candidate in itertools.combinations_with_replacement(
-                    cls.LEADING_CHARS, i - 1):
-                new_name = '~' + ''.join(candidate) + name[i:]
+                cls.LEADING_CHARS, i - 1
+            ):
+                new_name = "~" + "".join(candidate) + name[i:]
                 if new_name != name:
                     yield new_name
 
         # If we make it this far, we will have to make a longer name
         for i in range(len(cls.LEADING_CHARS)):
             for candidate in itertools.combinations_with_replacement(
-                    cls.LEADING_CHARS, i):
-                new_name = '~' + ''.join(candidate) + name
+                cls.LEADING_CHARS, i
+            ):
+                new_name = "~" + "".join(candidate) + name
                 if new_name != name:
                     yield new_name
 
@@ -258,9 +254,7 @@ class AdjacentTempDirectory(TempDirectory):
                 break
         else:
             # Final fallback on the default behavior.
-            path = os.path.realpath(
-                tempfile.mkdtemp(prefix=f"pip-{kind}-")
-            )
+            path = os.path.realpath(tempfile.mkdtemp(prefix=f"pip-{kind}-"))
 
         logger.debug("Created temporary directory: %s", path)
         return path
diff --git a/src/pip/_internal/utils/unpacking.py b/src/pip/_internal/utils/unpacking.py
index 74d3f4a9c..44ac47535 100644
--- a/src/pip/_internal/utils/unpacking.py
+++ b/src/pip/_internal/utils/unpacking.py
@@ -26,16 +26,18 @@ SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS
 
 try:
     import bz2  # noqa
+
     SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS
 except ImportError:
-    logger.debug('bz2 module is not available')
+    logger.debug("bz2 module is not available")
 
 try:
     # Only for Python 3.3+
     import lzma  # noqa
+
     SUPPORTED_EXTENSIONS += XZ_EXTENSIONS
 except ImportError:
-    logger.debug('lzma module is not available')
+    logger.debug("lzma module is not available")
 
 
 def current_umask():
@@ -48,18 +50,15 @@ def current_umask():
 
 def split_leading_dir(path):
     # type: (str) -> List[str]
-    path = path.lstrip('/').lstrip('\\')
-    if (
-        '/' in path and (
-            ('\\' in path and path.find('/') < path.find('\\')) or
-            '\\' not in path
-        )
+    path = path.lstrip("/").lstrip("\\")
+    if "/" in path and (
+        ("\\" in path and path.find("/") < path.find("\\")) or "\\" not in path
     ):
-        return path.split('/', 1)
-    elif '\\' in path:
-        return path.split('\\', 1)
+        return path.split("/", 1)
+    elif "\\" in path:
+        return path.split("\\", 1)
     else:
-        return [path, '']
+        return [path, ""]
 
 
 def has_leading_dir(paths):
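
split_leading_dir in one line of behavior (paths are illustrative):

    from pip._internal.utils.unpacking import split_leading_dir

    assert split_leading_dir("pkg-1.0/setup.py") == ["pkg-1.0", "setup.py"]
    assert split_leading_dir("setup.py") == ["setup.py", ""]
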
@@ -118,7 +117,7 @@ def unzip_file(filename, location, flatten=True):
     no-ops per the python docs.
     """
     ensure_dir(location)
-    zipfp = open(filename, 'rb')
+    zipfp = open(filename, "rb")
     try:
         zip = zipfile.ZipFile(zipfp, allowZip64=True)
         leading = has_leading_dir(zip.namelist()) and flatten
@@ -131,11 +130,11 @@ def unzip_file(filename, location, flatten=True):
             dir = os.path.dirname(fn)
             if not is_within_directory(location, fn):
                 message = (
-                    'The zip file ({}) has a file ({}) trying to install '
-                    'outside target directory ({})'
+                    "The zip file ({}) has a file ({}) trying to install "
+                    "outside target directory ({})"
                 )
                 raise InstallationError(message.format(filename, fn, location))
-            if fn.endswith('/') or fn.endswith('\\'):
+            if fn.endswith("/") or fn.endswith("\\"):
                 # A directory
                 ensure_dir(fn)
             else:
@@ -144,7 +143,7 @@ def unzip_file(filename, location, flatten=True):
                 # chunk of memory for the file's content
                 fp = zip.open(name)
                 try:
-                    with open(fn, 'wb') as destfp:
+                    with open(fn, "wb") as destfp:
                         shutil.copyfileobj(fp, destfp)
                 finally:
                     fp.close()
@@ -165,24 +164,23 @@ def untar_file(filename, location):
     no-ops per the python docs.
     """
     ensure_dir(location)
-    if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
-        mode = 'r:gz'
+    if filename.lower().endswith(".gz") or filename.lower().endswith(".tgz"):
+        mode = "r:gz"
     elif filename.lower().endswith(BZ2_EXTENSIONS):
-        mode = 'r:bz2'
+        mode = "r:bz2"
     elif filename.lower().endswith(XZ_EXTENSIONS):
-        mode = 'r:xz'
-    elif filename.lower().endswith('.tar'):
-        mode = 'r'
+        mode = "r:xz"
+    elif filename.lower().endswith(".tar"):
+        mode = "r"
     else:
         logger.warning(
-            'Cannot determine compression type for file %s', filename,
+            "Cannot determine compression type for file %s",
+            filename,
         )
-        mode = 'r:*'
+        mode = "r:*"
     tar = tarfile.open(filename, mode)
     try:
-        leading = has_leading_dir([
-            member.name for member in tar.getmembers()
-        ])
+        leading = has_leading_dir([member.name for member in tar.getmembers()])
         for member in tar.getmembers():
             fn = member.name
             if leading:
@@ -190,12 +188,10 @@ def untar_file(filename, location):
             path = os.path.join(location, fn)
             if not is_within_directory(location, path):
                 message = (
-                    'The tar file ({}) has a file ({}) trying to install '
-                    'outside target directory ({})'
-                )
-                raise InstallationError(
-                    message.format(filename, path, location)
+                    "The tar file ({}) has a file ({}) trying to install "
+                    "outside target directory ({})"
                 )
+                raise InstallationError(message.format(filename, path, location))
             if member.isdir():
                 ensure_dir(path)
             elif member.issym():
@@ -206,8 +202,10 @@ def untar_file(filename, location):
                     # Some corrupt tar files seem to produce this
                     # (specifically bad symlinks)
                     logger.warning(
-                        'In the tar file %s the member %s is invalid: %s',
-                        filename, member.name, exc,
+                        "In the tar file %s the member %s is invalid: %s",
+                        filename,
+                        member.name,
+                        exc,
                     )
                     continue
             else:
@@ -217,13 +215,15 @@ def untar_file(filename, location):
                     # Some corrupt tar files seem to produce this
                     # (specifically bad symlinks)
                     logger.warning(
-                        'In the tar file %s the member %s is invalid: %s',
-                        filename, member.name, exc,
+                        "In the tar file %s the member %s is invalid: %s",
+                        filename,
+                        member.name,
+                        exc,
                     )
                     continue
                 ensure_dir(os.path.dirname(path))
                 assert fp is not None
-                with open(path, 'wb') as destfp:
+                with open(path, "wb") as destfp:
                     shutil.copyfileobj(fp, destfp)
                 fp.close()
                 # Update the timestamp (useful for cython compiled files)
@@ -236,38 +236,32 @@ def untar_file(filename, location):
 
 
 def unpack_file(
-        filename,  # type: str
-        location,  # type: str
-        content_type=None,  # type: Optional[str]
+    filename,  # type: str
+    location,  # type: str
+    content_type=None,  # type: Optional[str]
 ):
     # type: (...) -> None
     filename = os.path.realpath(filename)
     if (
-        content_type == 'application/zip' or
-        filename.lower().endswith(ZIP_EXTENSIONS) or
-        zipfile.is_zipfile(filename)
+        content_type == "application/zip"
+        or filename.lower().endswith(ZIP_EXTENSIONS)
+        or zipfile.is_zipfile(filename)
     ):
-        unzip_file(
-            filename,
-            location,
-            flatten=not filename.endswith('.whl')
-        )
+        unzip_file(filename, location, flatten=not filename.endswith(".whl"))
     elif (
-        content_type == 'application/x-gzip' or
-        tarfile.is_tarfile(filename) or
-        filename.lower().endswith(
-            TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS
-        )
+        content_type == "application/x-gzip"
+        or tarfile.is_tarfile(filename)
+        or filename.lower().endswith(TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS)
     ):
         untar_file(filename, location)
     else:
         # FIXME: handle?
         # FIXME: magic signatures?
         logger.critical(
-            'Cannot unpack file %s (downloaded from %s, content-type: %s); '
-            'cannot detect archive format',
-            filename, location, content_type,
-        )
-        raise InstallationError(
-            f'Cannot determine archive format of {location}'
+            "Cannot unpack file %s (downloaded from %s, content-type: %s); "
+            "cannot detect archive format",
+            filename,
+            location,
+            content_type,
         )
+        raise InstallationError(f"Cannot determine archive format of {location}")
diff --git a/src/pip/_internal/utils/urls.py b/src/pip/_internal/utils/urls.py
index 8ae11ce7d..50a04d861 100644
--- a/src/pip/_internal/utils/urls.py
+++ b/src/pip/_internal/utils/urls.py
@@ -7,9 +7,9 @@ from typing import Optional
 
 def get_url_scheme(url):
     # type: (str) -> Optional[str]
-    if ':' not in url:
+    if ":" not in url:
         return None
-    return url.split(':', 1)[0].lower()
+    return url.split(":", 1)[0].lower()
 
 
 def path_to_url(path):
@@ -19,7 +19,7 @@ def path_to_url(path):
     quoted path parts.
     """
     path = os.path.normpath(os.path.abspath(path))
-    url = urllib.parse.urljoin('file:', urllib.request.pathname2url(path))
+    url = urllib.parse.urljoin("file:", urllib.request.pathname2url(path))
     return url
 
 
@@ -28,22 +28,21 @@ def url_to_path(url):
     """
     Convert a file: URL to a path.
     """
-    assert url.startswith('file:'), (
-        "You can only turn file: urls into filenames (not {url!r})"
-        .format(**locals()))
+    assert url.startswith(
+        "file:"
+    ), f"You can only turn file: urls into filenames (not {url!r})"
 
     _, netloc, path, _, _ = urllib.parse.urlsplit(url)
 
-    if not netloc or netloc == 'localhost':
+    if not netloc or netloc == "localhost":
         # According to RFC 8089, same as empty authority.
-        netloc = ''
-    elif sys.platform == 'win32':
+        netloc = ""
+    elif sys.platform == "win32":
         # If we have a UNC path, prepend UNC share notation.
-        netloc = '\\\\' + netloc
+        netloc = "\\\\" + netloc
     else:
         raise ValueError(
-            'non-local file URIs are not supported on this platform: {url!r}'
-            .format(**locals())
+            f"non-local file URIs are not supported on this platform: {url!r}"
         )
 
     path = urllib.request.url2pathname(netloc + path)
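
For plain ASCII local paths the two helpers should round-trip, e.g.:

    import os

    from pip._internal.utils.urls import path_to_url, url_to_path

    path = os.path.abspath("setup.py")
    assert url_to_path(path_to_url(path)) == path
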
diff --git a/src/pip/_internal/utils/virtualenv.py b/src/pip/_internal/utils/virtualenv.py
index c9c601f86..51cacb55c 100644
--- a/src/pip/_internal/utils/virtualenv.py
+++ b/src/pip/_internal/utils/virtualenv.py
@@ -27,13 +27,12 @@ def _running_under_regular_virtualenv():
     This handles virtual environments created with pypa's virtualenv.
     """
     # pypa/virtualenv case
-    return hasattr(sys, 'real_prefix')
+    return hasattr(sys, "real_prefix")
 
 
 def running_under_virtualenv():
     # type: () -> bool
-    """Return True if we're running inside a virtualenv, False otherwise.
-    """
+    """Return True if we're running inside a virtualenv, False otherwise."""
     return _running_under_venv() or _running_under_regular_virtualenv()
 
 
@@ -43,11 +42,11 @@ def _get_pyvenv_cfg_lines():
 
     Returns None, if it could not read/access the file.
     """
-    pyvenv_cfg_file = os.path.join(sys.prefix, 'pyvenv.cfg')
+    pyvenv_cfg_file = os.path.join(sys.prefix, "pyvenv.cfg")
     try:
         # Although PEP 405 does not specify, the built-in venv module always
         # writes with UTF-8. (pypa/pip#8717)
-        with open(pyvenv_cfg_file, encoding='utf-8') as f:
+        with open(pyvenv_cfg_file, encoding="utf-8") as f:
             return f.read().splitlines()  # avoids trailing newlines
     except OSError:
         return None
@@ -78,7 +77,7 @@ def _no_global_under_venv():
 
     for line in cfg_lines:
         match = _INCLUDE_SYSTEM_SITE_PACKAGES_REGEX.match(line)
-        if match is not None and match.group('value') == 'false':
+        if match is not None and match.group("value") == "false":
             return True
     return False
 
@@ -92,15 +91,15 @@ def _no_global_under_regular_virtualenv():
     """
     site_mod_dir = os.path.dirname(os.path.abspath(site.__file__))
     no_global_site_packages_file = os.path.join(
-        site_mod_dir, 'no-global-site-packages.txt',
+        site_mod_dir,
+        "no-global-site-packages.txt",
     )
     return os.path.exists(no_global_site_packages_file)
 
 
 def virtualenv_no_global():
     # type: () -> bool
-    """Returns a boolean, whether running in venv with no system site-packages.
-    """
+    """Returns a boolean, whether running in venv with no system site-packages."""
     # PEP 405 compliance needs to be checked first since virtualenv >=20 would
     # return True for both checks, but is only able to use the PEP 405 config.
     if _running_under_venv():
diff --git a/src/pip/_internal/utils/wheel.py b/src/pip/_internal/utils/wheel.py
index de0485812..42f080845 100644
--- a/src/pip/_internal/utils/wheel.py
+++ b/src/pip/_internal/utils/wheel.py
@@ -23,6 +23,7 @@ class WheelMetadata(DictMetadata):
     """Metadata provider that maps metadata decoding exceptions to our
     internal exception type.
     """
+
     def __init__(self, metadata, wheel_name):
         # type: (Dict[str, bytes], str) -> None
         super().__init__(metadata)
@@ -35,9 +36,7 @@ class WheelMetadata(DictMetadata):
         except UnicodeDecodeError as e:
             # Augment the default error with the origin of the file.
             raise UnsupportedWheel(
-                "Error decoding metadata for {}: {}".format(
-                    self._wheel_name, e
-                )
+                f"Error decoding metadata for {self._wheel_name}: {e}"
             )
 
 
@@ -49,9 +48,7 @@ def pkg_resources_distribution_for_wheel(wheel_zip, name, location):
     """
     info_dir, _ = parse_wheel(wheel_zip, name)
 
-    metadata_files = [
-        p for p in wheel_zip.namelist() if p.startswith(f"{info_dir}/")
-    ]
+    metadata_files = [p for p in wheel_zip.namelist() if p.startswith(f"{info_dir}/")]
 
     metadata_text = {}  # type: Dict[str, bytes]
     for path in metadata_files:
@@ -60,15 +57,11 @@ def pkg_resources_distribution_for_wheel(wheel_zip, name, location):
         try:
             metadata_text[metadata_name] = read_wheel_metadata_file(wheel_zip, path)
         except UnsupportedWheel as e:
-            raise UnsupportedWheel(
-                "{} has an invalid wheel, {}".format(name, str(e))
-            )
+            raise UnsupportedWheel("{} has an invalid wheel, {}".format(name, str(e)))
 
     metadata = WheelMetadata(metadata_text, location)
 
-    return DistInfoDistribution(
-        location=location, metadata=metadata, project_name=name
-    )
+    return DistInfoDistribution(location=location, metadata=metadata, project_name=name)
 
 
 def parse_wheel(wheel_zip, name):
@@ -83,9 +76,7 @@ def parse_wheel(wheel_zip, name):
         metadata = wheel_metadata(wheel_zip, info_dir)
         version = wheel_version(metadata)
     except UnsupportedWheel as e:
-        raise UnsupportedWheel(
-            "{} has an invalid wheel, {}".format(name, str(e))
-        )
+        raise UnsupportedWheel("{} has an invalid wheel, {}".format(name, str(e)))
 
     check_compatibility(version, name)
 
@@ -102,16 +93,14 @@ def wheel_dist_info_dir(source, name):
     # Zip file path separators must be /
     subdirs = {p.split("/", 1)[0] for p in source.namelist()}
 
-    info_dirs = [s for s in subdirs if s.endswith('.dist-info')]
+    info_dirs = [s for s in subdirs if s.endswith(".dist-info")]
 
     if not info_dirs:
         raise UnsupportedWheel(".dist-info directory not found")
 
     if len(info_dirs) > 1:
         raise UnsupportedWheel(
-            "multiple .dist-info directories found: {}".format(
-                ", ".join(info_dirs)
-            )
+            "multiple .dist-info directories found: {}".format(", ".join(info_dirs))
         )
 
     info_dir = info_dirs[0]
@@ -135,9 +124,7 @@ def read_wheel_metadata_file(source, path):
         # BadZipFile for general corruption, KeyError for missing entry,
         # and RuntimeError for password-protected files
     except (BadZipFile, KeyError, RuntimeError) as e:
-        raise UnsupportedWheel(
-            f"could not read {path!r} file: {e!r}"
-        )
+        raise UnsupportedWheel(f"could not read {path!r} file: {e!r}")
 
 
 def wheel_metadata(source, dist_info_dir):
@@ -172,7 +159,7 @@ def wheel_version(wheel_data):
     version = version_text.strip()
 
     try:
-        return tuple(map(int, version.split('.')))
+        return tuple(map(int, version.split(".")))
     except ValueError:
         raise UnsupportedWheel(f"invalid Wheel-Version: {version!r}")
 
@@ -193,10 +180,10 @@ def check_compatibility(version, name):
     if version[0] > VERSION_COMPATIBLE[0]:
         raise UnsupportedWheel(
             "{}'s Wheel-Version ({}) is not compatible with this version "
-            "of pip".format(name, '.'.join(map(str, version)))
+            "of pip".format(name, ".".join(map(str, version)))
         )
     elif version > VERSION_COMPATIBLE:
         logger.warning(
-            'Installing from a newer Wheel-Version (%s)',
-            '.'.join(map(str, version)),
+            "Installing from a newer Wheel-Version (%s)",
+            ".".join(map(str, version)),
         )
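
wheel_version() parses the WHEEL metadata into an int tuple; a sketch using an in-memory email message as the parsed metadata:

    from email.parser import Parser

    from pip._internal.utils.wheel import wheel_version

    metadata = Parser().parsestr("Wheel-Version: 1.0\nGenerator: example\n")
    assert wheel_version(metadata) == (1, 0)
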
diff --git a/src/pip/_internal/vcs/subversion.py b/src/pip/_internal/vcs/subversion.py
index 0819f1bc6..f58264446 100644
--- a/src/pip/_internal/vcs/subversion.py
+++ b/src/pip/_internal/vcs/subversion.py
@@ -159,8 +159,7 @@ class Subversion(VersionControl):
         elif data.startswith('<?xml'):
diff --git a/src/pip/_internal/wheel_builder.py b/src/pip/_internal/wheel_builder.py
--- a/src/pip/_internal/wheel_builder.py
+++ b/src/pip/_internal/wheel_builder.py
@@ def _verify_one(req, wheel_path):
     # type: (InstallRequirement, str) -> None
-    canonical_name = canonicalize_name(req.name)
+    canonical_name = canonicalize_name(req.name or "")
     w = Wheel(os.path.basename(wheel_path))
     if canonicalize_name(w.name) != canonical_name:
         raise InvalidWheelFilename(
@@ -175,10 +175,11 @@ def _verify_one(req, wheel_path):
             "got {!r}".format(canonical_name, w.name),
         )
     dist = get_wheel_distribution(wheel_path, canonical_name)
-    if canonicalize_version(dist.version) != canonicalize_version(w.version):
+    dist_verstr = str(dist.version)
+    if canonicalize_version(dist_verstr) != canonicalize_version(w.version):
         raise InvalidWheelFilename(
             "Wheel has unexpected file name: expected {!r}, "
-            "got {!r}".format(str(dist.version), w.version),
+            "got {!r}".format(dist_verstr, w.version),
         )
     metadata_version_value = dist.metadata_version
     if metadata_version_value is None:
@@ -186,13 +187,13 @@ def _verify_one(req, wheel_path):
     try:
         metadata_version = Version(metadata_version_value)
     except InvalidVersion:
-        msg = "Invalid Metadata-Version: {}".format(metadata_version_value)
+        msg = f"Invalid Metadata-Version: {metadata_version_value}"
         raise UnsupportedWheel(msg)
     if (metadata_version >= Version("1.2")
             and not isinstance(dist.version, Version)):
         raise UnsupportedWheel(
             "Metadata 1.2 mandates PEP 440 version, "
-            "but {!r} is not".format(str(dist.version))
+            "but {!r} is not".format(dist_verstr)
         )
 
 
@@ -242,6 +243,7 @@ def _build_one_inside_env(
         assert req.name
         if req.use_pep517:
             assert req.metadata_directory
+            assert req.pep517_backend
             wheel_path = build_wheel_pep517(
                 name=req.name,
                 backend=req.pep517_backend,
diff --git a/src/pip/_vendor/resolvelib.pyi b/src/pip/_vendor/resolvelib.pyi
deleted file mode 100644
index b4ef4e108..000000000
--- a/src/pip/_vendor/resolvelib.pyi
+++ /dev/null
@@ -1 +0,0 @@
-from resolvelib import *
\ No newline at end of file
diff --git a/src/pip/_vendor/resolvelib/__init__.py b/src/pip/_vendor/resolvelib/__init__.py
index f023ad631..63ee53446 100644
--- a/src/pip/_vendor/resolvelib/__init__.py
+++ b/src/pip/_vendor/resolvelib/__init__.py
@@ -11,7 +11,7 @@ __all__ = [
     "ResolutionTooDeep",
 ]
 
-__version__ = "0.5.4"
+__version__ = "0.5.5"
 
 
 from .providers import AbstractProvider, AbstractResolver
diff --git a/src/pip/_vendor/resolvelib/__init__.pyi b/src/pip/_vendor/resolvelib/__init__.pyi
new file mode 100644
index 000000000..4a84f8f30
--- /dev/null
+++ b/src/pip/_vendor/resolvelib/__init__.pyi
@@ -0,0 +1,15 @@
+__version__: str
+
+from .providers import (
+    AbstractResolver as AbstractResolver,
+    AbstractProvider as AbstractProvider,
+)
+from .reporters import BaseReporter as BaseReporter
+from .resolvers import (
+    InconsistentCandidate as InconsistentCandidate,
+    RequirementsConflicted as RequirementsConflicted,
+    Resolver as Resolver,
+    ResolutionError as ResolutionError,
+    ResolutionImpossible as ResolutionImpossible,
+    ResolutionTooDeep as ResolutionTooDeep,
+)
diff --git a/src/pip/_vendor/resolvelib/providers.py b/src/pip/_vendor/resolvelib/providers.py
index 965cf9c13..8ef700cc0 100644
--- a/src/pip/_vendor/resolvelib/providers.py
+++ b/src/pip/_vendor/resolvelib/providers.py
@@ -2,12 +2,10 @@ class AbstractProvider(object):
     """Delegate class to provide requirement interface for the resolver."""
 
     def identify(self, requirement_or_candidate):
-        """Given a requirement or candidate, return an identifier for it.
+        """Given a requirement, return an identifier for it.
 
-        This is used in many places to identify a requirement or candidate,
-        e.g. whether two requirements should have their specifier parts merged,
-        whether two candidates would conflict with each other (because they
-        have same name but different versions).
+        This is used to identify a requirement, e.g. whether two requirements
+        should have their specifier parts merged.
         """
         raise NotImplementedError
 
diff --git a/src/pip/_vendor/resolvelib/providers.pyi b/src/pip/_vendor/resolvelib/providers.pyi
new file mode 100644
index 000000000..3c8ff24d4
--- /dev/null
+++ b/src/pip/_vendor/resolvelib/providers.pyi
@@ -0,0 +1,44 @@
+from typing import (
+    Any,
+    Collection,
+    Generic,
+    Iterable,
+    Mapping,
+    Optional,
+    Protocol,
+    Sequence,
+    Union,
+)
+
+from .reporters import BaseReporter
+from .resolvers import RequirementInformation
+from .structs import (
+    KT,
+    RT,
+    CT,
+    IterableView,
+    Matches,
+)
+
+class Preference(Protocol):
+    def __lt__(self, __other: Any) -> bool: ...
+
+class AbstractProvider(Generic[RT, CT, KT]):
+    def identify(self, requirement_or_candidate: Union[RT, CT]) -> KT: ...
+    def get_preference(
+        self,
+        resolution: Optional[CT],
+        candidates: IterableView[CT],
+        information: Collection[RequirementInformation[RT, CT]],
+    ) -> Preference: ...
+    def find_matches(self, requirements: Sequence[RT]) -> Matches: ...
+    def is_satisfied_by(self, requirement: RT, candidate: CT) -> bool: ...
+    def get_dependencies(self, candidate: CT) -> Iterable[RT]: ...
+
+class AbstractResolver(Generic[RT, CT, KT]):
+    base_exception = Exception
+    provider: AbstractProvider[RT, CT, KT]
+    reporter: BaseReporter
+    def __init__(
+        self, provider: AbstractProvider[RT, CT, KT], reporter: BaseReporter
+    ): ...
diff --git a/src/pip/_vendor/resolvelib/py.typed b/src/pip/_vendor/resolvelib/py.typed
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/pip/_vendor/resolvelib/reporters.pyi b/src/pip/_vendor/resolvelib/reporters.pyi
new file mode 100644
index 000000000..55e38ab88
--- /dev/null
+++ b/src/pip/_vendor/resolvelib/reporters.pyi
@@ -0,0 +1,10 @@
+from typing import Any
+
+class BaseReporter:
+    def starting(self) -> Any: ...
+    def starting_round(self, index: int) -> Any: ...
+    def ending_round(self, index: int, state: Any) -> Any: ...
+    def ending(self, state: Any) -> Any: ...
+    def adding_requirement(self, requirement: Any, parent: Any) -> Any: ...
+    def backtracking(self, candidate: Any) -> Any: ...
+    def pinning(self, candidate: Any) -> Any: ...
diff --git a/src/pip/_vendor/resolvelib/resolvers.py b/src/pip/_vendor/resolvelib/resolvers.py
index bb88d8c2c..60a30ee4f 100644
--- a/src/pip/_vendor/resolvelib/resolvers.py
+++ b/src/pip/_vendor/resolvelib/resolvers.py
@@ -76,7 +76,8 @@ class Criterion(object):
     @classmethod
     def from_requirement(cls, provider, requirement, parent):
         """Build an instance from a requirement."""
-        cands = build_iter_view(provider.find_matches([requirement]))
+        matches = provider.find_matches(requirements=[requirement])
+        cands = build_iter_view(matches)
         infos = [RequirementInformation(requirement, parent)]
         criterion = cls(cands, infos, incompatibilities=[])
         if not cands:
@@ -93,7 +94,8 @@ class Criterion(object):
         """Build a new instance from this and a new requirement."""
         infos = list(self.information)
         infos.append(RequirementInformation(requirement, parent))
-        cands = build_iter_view(provider.find_matches([r for r, _ in infos]))
+        matches = provider.find_matches([r for r, _ in infos])
+        cands = build_iter_view(matches)
         criterion = type(self)(cands, infos, list(self.incompatibilities))
         if not cands:
             raise RequirementsConflicted(criterion)
@@ -165,22 +167,21 @@ class Resolution(object):
         self._states.append(state)
 
     def _merge_into_criterion(self, requirement, parent):
-        self._r.adding_requirement(requirement, parent)
-        name = self._p.identify(requirement)
-        try:
+        self._r.adding_requirement(requirement=requirement, parent=parent)
+        name = self._p.identify(requirement_or_candidate=requirement)
+        if name in self.state.criteria:
             crit = self.state.criteria[name]
-        except KeyError:
-            crit = Criterion.from_requirement(self._p, requirement, parent)
-        else:
             crit = crit.merged_with(self._p, requirement, parent)
+        else:
+            crit = Criterion.from_requirement(self._p, requirement, parent)
         return name, crit
 
     def _get_criterion_item_preference(self, item):
         name, criterion = item
         return self._p.get_preference(
-            self.state.mapping.get(name),
-            criterion.candidates.for_preference(),
-            criterion.information,
+            resolution=self.state.mapping.get(name),
+            candidates=criterion.candidates.for_preference(),
+            information=criterion.information,
         )
 
     def _is_current_pin_satisfying(self, name, criterion):
@@ -189,13 +190,13 @@ class Resolution(object):
         except KeyError:
             return False
         return all(
-            self._p.is_satisfied_by(r, current_pin)
+            self._p.is_satisfied_by(requirement=r, candidate=current_pin)
             for r in criterion.iter_requirement()
         )
 
     def _get_criteria_to_update(self, candidate):
         criteria = {}
-        for r in self._p.get_dependencies(candidate):
+        for r in self._p.get_dependencies(candidate=candidate):
             name, crit = self._merge_into_criterion(r, parent=candidate)
             criteria[name] = crit
         return criteria
@@ -214,7 +215,7 @@ class Resolution(object):
             # faulty provider, we will raise an error to notify the implementer
             # to fix find_matches() and/or is_satisfied_by().
             satisfied = all(
-                self._p.is_satisfied_by(r, candidate)
+                self._p.is_satisfied_by(requirement=r, candidate=candidate)
                 for r in criterion.iter_requirement()
             )
             if not satisfied:
@@ -222,7 +223,7 @@ class Resolution(object):
 
             # Put newly-pinned candidate at the end. This is essential because
             # backtracking looks at this mapping to get the last pin.
-            self._r.pinning(candidate)
+            self._r.pinning(candidate=candidate)
             self.state.mapping.pop(name, None)
             self.state.mapping[name] = candidate
             self.state.criteria.update(criteria)
@@ -274,7 +275,7 @@ class Resolution(object):
             # Also mark the newly known incompatibility.
             incompatibilities_from_broken.append((name, [candidate]))
 
-            self._r.backtracking(candidate)
+            self._r.backtracking(candidate=candidate)
 
             # Create a new state from the last known-to-work one, and apply
             # the previously gathered incompatibility information.
@@ -326,7 +327,7 @@ class Resolution(object):
         self._push_new_state()
 
         for round_index in range(max_rounds):
-            self._r.starting_round(round_index)
+            self._r.starting_round(index=round_index)
 
             unsatisfied_criterion_items = [
                 item
@@ -336,7 +337,7 @@ class Resolution(object):
 
             # All criteria are accounted for. Nothing more to pin, we are done!
             if not unsatisfied_criterion_items:
-                self._r.ending(self.state)
+                self._r.ending(state=self.state)
                 return self.state
 
             # Choose the most preferred unpinned criterion to try.
@@ -359,7 +360,7 @@ class Resolution(object):
                 # Pinning was successful. Push a new state to do another pin.
                 self._push_new_state()
 
-            self._r.ending_round(round_index, self.state)
+            self._r.ending_round(index=round_index, state=self.state)
 
         raise ResolutionTooDeep(max_rounds)
 
diff --git a/src/pip/_vendor/resolvelib/resolvers.pyi b/src/pip/_vendor/resolvelib/resolvers.pyi
new file mode 100644
index 000000000..e61b0bcb4
--- /dev/null
+++ b/src/pip/_vendor/resolvelib/resolvers.pyi
@@ -0,0 +1,73 @@
+from typing import (
+    Collection,
+    Generic,
+    Iterable,
+    Iterator,
+    List,
+    Mapping,
+    Optional,
+)
+
+from .providers import AbstractProvider, AbstractResolver
+from .structs import (
+    CT,
+    KT,
+    RT,
+    DirectedGraph,
+    IterableView,
+)
+
+# This should be a NamedTuple, but Python 3.6 has a bug that prevents it.
+# https://stackoverflow.com/a/50531189/1376863
+class RequirementInformation(tuple, Generic[RT, CT]):
+    requirement: RT
+    parent: Optional[CT]
+
+class Criterion(Generic[RT, CT, KT]):
+    candidates: IterableView[CT]
+    information: Collection[RequirementInformation[RT, CT]]
+    incompatibilities: List[CT]
+    @classmethod
+    def from_requirement(
+        cls,
+        provider: AbstractProvider[RT, CT, KT],
+        requirement: RT,
+        parent: Optional[CT],
+    ) -> Criterion[RT, CT, KT]: ...
+    def iter_requirement(self) -> Iterator[RT]: ...
+    def iter_parent(self) -> Iterator[Optional[CT]]: ...
+    def merged_with(
+        self,
+        provider: AbstractProvider[RT, CT, KT],
+        requirement: RT,
+        parent: Optional[CT],
+    ) -> Criterion[RT, CT, KT]: ...
+    def excluded_of(self, candidates: List[CT]) -> Criterion[RT, CT, KT]: ...
+
+class ResolverException(Exception): ...
+
+class RequirementsConflicted(ResolverException, Generic[RT, CT, KT]):
+    criterion: Criterion[RT, CT, KT]
+
+class ResolutionError(ResolverException): ...
+
+class InconsistentCandidate(ResolverException, Generic[RT, CT, KT]):
+    candidate: CT
+    criterion: Criterion[RT, CT, KT]
+
+class ResolutionImpossible(ResolutionError, Generic[RT, CT]):
+    causes: List[RequirementInformation[RT, CT]]
+
+class ResolutionTooDeep(ResolutionError):
+    round_count: int
+
+class Result(Generic[RT, CT, KT]):
+    mapping: Mapping[KT, CT]
+    graph: DirectedGraph[Optional[KT]]
+    criteria: Mapping[KT, Criterion[RT, CT, KT]]
+
+class Resolver(AbstractResolver, Generic[RT, CT, KT]):
+    base_exception = ResolverException
+    def resolve(
+        self, requirements: Iterable[RT], max_rounds: int = 100
+    ) -> Result[RT, CT, KT]: ...
diff --git a/src/pip/_vendor/resolvelib/structs.pyi b/src/pip/_vendor/resolvelib/structs.pyi
new file mode 100644
index 000000000..1122d17aa
--- /dev/null
+++ b/src/pip/_vendor/resolvelib/structs.pyi
@@ -0,0 +1,35 @@
+from abc import ABCMeta
+from typing import (
+    Callable,
+    Container,
+    Generic,
+    Iterable,
+    Iterator,
+    Tuple,
+    TypeVar,
+    Union,
+)
+
+KT = TypeVar("KT")
+RT = TypeVar("RT")
+CT = TypeVar("CT")
+_T = TypeVar("_T")
+Matches = Union[Iterable[CT], Callable[[], Iterator[CT]]]
+
+class IterableView(Container[CT], Iterable[CT], metaclass=ABCMeta):
+    def excluding(self: _T, candidates: Container[CT]) -> _T: ...
+
+class DirectedGraph(Generic[KT]):
+    def __iter__(self) -> Iterator[KT]: ...
+    def __len__(self) -> int: ...
+    def __contains__(self, key: KT) -> bool: ...
+    def copy(self) -> "DirectedGraph[KT]": ...
+    def add(self, key: KT) -> None: ...
+    def remove(self, key: KT) -> None: ...
+    def connected(self, f: KT, t: KT) -> bool: ...
+    def connect(self, f: KT, t: KT) -> None: ...
+    def iter_edges(self) -> Iterable[Tuple[KT, KT]]: ...
+    def iter_children(self, key: KT) -> Iterable[KT]: ...
+    def iter_parents(self, key: KT) -> Iterable[KT]: ...
+
+def build_iter_view(matches: Matches) -> IterableView[CT]: ...
diff --git a/src/pip/_vendor/urllib3/_version.py b/src/pip/_vendor/urllib3/_version.py
index 2dba29e3f..97c983300 100644
--- a/src/pip/_vendor/urllib3/_version.py
+++ b/src/pip/_vendor/urllib3/_version.py
@@ -1,2 +1,2 @@
 # This file is protected via CODEOWNERS
-__version__ = "1.26.2"
+__version__ = "1.26.4"
diff --git a/src/pip/_vendor/urllib3/connection.py b/src/pip/_vendor/urllib3/connection.py
index 660d679c3..45580b7e1 100644
--- a/src/pip/_vendor/urllib3/connection.py
+++ b/src/pip/_vendor/urllib3/connection.py
@@ -67,7 +67,7 @@ port_by_scheme = {"http": 80, "https": 443}
 
 # When it comes time to update this value as a part of regular maintenance
 # (ie test_recent_date is failing) update it to ~6 months before the current date.
-RECENT_DATE = datetime.date(2019, 1, 1)
+RECENT_DATE = datetime.date(2020, 7, 1)
 
 _CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]")
 
@@ -215,7 +215,7 @@ class HTTPConnection(_HTTPConnection, object):
 
     def putheader(self, header, *values):
         """"""
-        if SKIP_HEADER not in values:
+        if not any(isinstance(v, str) and v == SKIP_HEADER for v in values):
             _HTTPConnection.putheader(self, header, *values)
         elif six.ensure_str(header.lower()) not in SKIPPABLE_HEADERS:
             raise ValueError(
@@ -490,6 +490,10 @@ class HTTPSConnection(HTTPConnection):
             self.ca_cert_dir,
             self.ca_cert_data,
         )
+        # By default urllib3's SSLContext disables `check_hostname` and uses
+        # a custom check. For proxies we're good with relying on the default
+        # verification.
+        ssl_context.check_hostname = True
 
         # If no cert was provided, use only the default options for server
         # certificate validation
diff --git a/src/pip/_vendor/urllib3/exceptions.py b/src/pip/_vendor/urllib3/exceptions.py
index d69958d5d..cba6f3f56 100644
--- a/src/pip/_vendor/urllib3/exceptions.py
+++ b/src/pip/_vendor/urllib3/exceptions.py
@@ -289,7 +289,17 @@ class ProxySchemeUnknown(AssertionError, URLSchemeUnknown):
     # TODO(t-8ch): Stop inheriting from AssertionError in v2.0.
 
     def __init__(self, scheme):
-        message = "Not supported proxy scheme %s" % scheme
+        # 'localhost' is here because our URL parser parses
+        # localhost:8080 -> scheme=localhost, remove if we fix this.
+        if scheme == "localhost":
+            scheme = None
+        if scheme is None:
+            message = "Proxy URL had no scheme, should start with http:// or https://"
+        else:
+            message = (
+                "Proxy URL had unsupported scheme %s, should use http:// or https://"
+                % scheme
+            )
         super(ProxySchemeUnknown, self).__init__(message)
 
 
diff --git a/src/pip/_vendor/urllib3/util/retry.py b/src/pip/_vendor/urllib3/util/retry.py
index ee51f922f..d25a41b42 100644
--- a/src/pip/_vendor/urllib3/util/retry.py
+++ b/src/pip/_vendor/urllib3/util/retry.py
@@ -253,6 +253,7 @@ class Retry(object):
                 "Using 'method_whitelist' with Retry is deprecated and "
                 "will be removed in v2.0. Use 'allowed_methods' instead",
                 DeprecationWarning,
+                stacklevel=2,
             )
             allowed_methods = method_whitelist
         if allowed_methods is _Default:
diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt
index 0032327a2..c5d1b643e 100644
--- a/src/pip/_vendor/vendor.txt
+++ b/src/pip/_vendor/vendor.txt
@@ -13,8 +13,8 @@ requests==2.25.1
     certifi==2020.12.05
     chardet==4.0.0
     idna==2.10
-    urllib3==1.26.2
-resolvelib==0.5.4
+    urllib3==1.26.4
+resolvelib==0.5.5
 setuptools==44.0.0
 six==1.15.0
 tenacity==6.3.1
diff --git a/tests/conftest.py b/tests/conftest.py
index 36f90653d..a53e0c4f7 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -56,26 +56,26 @@ def pytest_addoption(parser):
 
 def pytest_collection_modifyitems(config, items):
     for item in items:
-        if not hasattr(item, 'module'):  # e.g.: DoctestTextfile
+        if not hasattr(item, "module"):  # e.g.: DoctestTextfile
             continue
 
-        if (item.get_closest_marker('search') and
-                not config.getoption('--run-search')):
-            item.add_marker(pytest.mark.skip('pip search test skipped'))
+        if item.get_closest_marker("search") and not config.getoption("--run-search"):
+            item.add_marker(pytest.mark.skip("pip search test skipped"))
 
         if "CI" in os.environ:
             # Mark network tests as flaky
-            if item.get_closest_marker('network') is not None:
+            if item.get_closest_marker("network") is not None:
                 item.add_marker(pytest.mark.flaky(reruns=3, reruns_delay=2))
 
-        if (item.get_closest_marker('incompatible_with_test_venv') and
-                config.getoption("--use-venv")):
-            item.add_marker(pytest.mark.skip(
-                'Incompatible with test venv'))
-        if (item.get_closest_marker('incompatible_with_venv') and
-                sys.prefix != sys.base_prefix):
-            item.add_marker(pytest.mark.skip(
-                'Incompatible with venv'))
+        if item.get_closest_marker("incompatible_with_test_venv") and config.getoption(
+            "--use-venv"
+        ):
+            item.add_marker(pytest.mark.skip("Incompatible with test venv"))
+        if (
+            item.get_closest_marker("incompatible_with_venv")
+            and sys.prefix != sys.base_prefix
+        ):
+            item.add_marker(pytest.mark.skip("Incompatible with venv"))
 
         module_path = os.path.relpath(
             item.module.__file__,
@@ -83,22 +83,21 @@ def pytest_collection_modifyitems(config, items):
         )
 
         module_root_dir = module_path.split(os.pathsep)[0]
-        if (module_root_dir.startswith("functional") or
-                module_root_dir.startswith("integration") or
-                module_root_dir.startswith("lib")):
+        if (
+            module_root_dir.startswith("functional")
+            or module_root_dir.startswith("integration")
+            or module_root_dir.startswith("lib")
+        ):
             item.add_marker(pytest.mark.integration)
         elif module_root_dir.startswith("unit"):
             item.add_marker(pytest.mark.unit)
         else:
-            raise RuntimeError(
-                f"Unknown test type (filename = {module_path})"
-            )
+            raise RuntimeError(f"Unknown test type (filename = {module_path})")
 
 
 @pytest.fixture(scope="session", autouse=True)
 def resolver_variant(request):
-    """Set environment variable to make pip default to the correct resolver.
-    """
+    """Set environment variable to make pip default to the correct resolver."""
     resolver = request.config.getoption("--resolver")
 
     # Handle the environment variables for this test.
@@ -118,9 +117,9 @@ def resolver_variant(request):
         yield resolver
 
 
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def tmpdir_factory(request, tmpdir_factory):
-    """ Modified `tmpdir_factory` session fixture
+    """Modified `tmpdir_factory` session fixture
     that will automatically cleanup after itself.
     """
     yield tmpdir_factory
@@ -172,17 +171,17 @@ def isolate(tmpdir, monkeypatch):
     fake_root = os.path.join(str(tmpdir), "fake-root")
     os.makedirs(fake_root)
 
-    if sys.platform == 'win32':
+    if sys.platform == "win32":
         # Note: this will only take effect in subprocesses...
         home_drive, home_path = os.path.splitdrive(home_dir)
-        monkeypatch.setenv('USERPROFILE', home_dir)
-        monkeypatch.setenv('HOMEDRIVE', home_drive)
-        monkeypatch.setenv('HOMEPATH', home_path)
+        monkeypatch.setenv("USERPROFILE", home_dir)
+        monkeypatch.setenv("HOMEDRIVE", home_drive)
+        monkeypatch.setenv("HOMEPATH", home_path)
         for env_var, sub_path in (
-            ('APPDATA', 'AppData/Roaming'),
-            ('LOCALAPPDATA', 'AppData/Local'),
+            ("APPDATA", "AppData/Roaming"),
+            ("LOCALAPPDATA", "AppData/Local"),
         ):
-            path = os.path.join(home_dir, *sub_path.split('/'))
+            path = os.path.join(home_dir, *sub_path.split("/"))
             monkeypatch.setenv(env_var, path)
             os.makedirs(path)
     else:
@@ -191,23 +190,46 @@ def isolate(tmpdir, monkeypatch):
         # of the user's actual $HOME directory.
         monkeypatch.setenv("HOME", home_dir)
         # Isolate ourselves from XDG directories
-        monkeypatch.setenv("XDG_DATA_HOME", os.path.join(
-            home_dir, ".local", "share",
-        ))
-        monkeypatch.setenv("XDG_CONFIG_HOME", os.path.join(
-            home_dir, ".config",
-        ))
+        monkeypatch.setenv(
+            "XDG_DATA_HOME",
+            os.path.join(
+                home_dir,
+                ".local",
+                "share",
+            ),
+        )
+        monkeypatch.setenv(
+            "XDG_CONFIG_HOME",
+            os.path.join(
+                home_dir,
+                ".config",
+            ),
+        )
         monkeypatch.setenv("XDG_CACHE_HOME", os.path.join(home_dir, ".cache"))
-        monkeypatch.setenv("XDG_RUNTIME_DIR", os.path.join(
-            home_dir, ".runtime",
-        ))
-        monkeypatch.setenv("XDG_DATA_DIRS", os.pathsep.join([
-            os.path.join(fake_root, "usr", "local", "share"),
-            os.path.join(fake_root, "usr", "share"),
-        ]))
-        monkeypatch.setenv("XDG_CONFIG_DIRS", os.path.join(
-            fake_root, "etc", "xdg",
-        ))
+        monkeypatch.setenv(
+            "XDG_RUNTIME_DIR",
+            os.path.join(
+                home_dir,
+                ".runtime",
+            ),
+        )
+        monkeypatch.setenv(
+            "XDG_DATA_DIRS",
+            os.pathsep.join(
+                [
+                    os.path.join(fake_root, "usr", "local", "share"),
+                    os.path.join(fake_root, "usr", "share"),
+                ]
+            ),
+        )
+        monkeypatch.setenv(
+            "XDG_CONFIG_DIRS",
+            os.path.join(
+                fake_root,
+                "etc",
+                "xdg",
+            ),
+        )
 
     # Configure git, because without an author name/email git will complain
     # and cause test failures.
@@ -224,9 +246,7 @@ def isolate(tmpdir, monkeypatch):
     # FIXME: Windows...
     os.makedirs(os.path.join(home_dir, ".config", "git"))
     with open(os.path.join(home_dir, ".config", "git", "config"), "wb") as fp:
-        fp.write(
-            b"[user]\n\tname = pip\n\temail = distutils-sig@python.org\n"
-        )
+        fp.write(b"[user]\n\tname = pip\n\temail = distutils-sig@python.org\n")
 
 
 @pytest.fixture(autouse=True)
@@ -245,7 +265,7 @@ def scoped_global_tempdir_manager(request):
         yield
 
 
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def pip_src(tmpdir_factory):
     def not_code_files_and_folders(path, names):
         # In the root directory...
@@ -265,7 +285,7 @@ def pip_src(tmpdir_factory):
             ignored.update(fnmatch.filter(names, pattern))
         return ignored
 
-    pip_src = Path(str(tmpdir_factory.mktemp('pip_src'))).joinpath('pip_src')
+    pip_src = Path(str(tmpdir_factory.mktemp("pip_src"))).joinpath("pip_src")
     # Copy over our source tree so that each use is self contained
     shutil.copytree(
         SRC_DIR,
@@ -276,83 +296,77 @@ def pip_src(tmpdir_factory):
 
 
 def _common_wheel_editable_install(tmpdir_factory, common_wheels, package):
-    wheel_candidates = list(
-        common_wheels.glob('{package}-*.whl'.format(**locals())))
+    wheel_candidates = list(common_wheels.glob(f"{package}-*.whl"))
     assert len(wheel_candidates) == 1, wheel_candidates
-    install_dir = Path(str(tmpdir_factory.mktemp(package))) / 'install'
+    install_dir = Path(str(tmpdir_factory.mktemp(package))) / "install"
     Wheel(wheel_candidates[0]).install_as_egg(install_dir)
-    (install_dir / 'EGG-INFO').rename(
-        install_dir / '{package}.egg-info'.format(**locals()))
+    (install_dir / "EGG-INFO").rename(install_dir / f"{package}.egg-info")
     assert compileall.compile_dir(str(install_dir), quiet=1)
     return install_dir
 
 
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def setuptools_install(tmpdir_factory, common_wheels):
-    return _common_wheel_editable_install(tmpdir_factory,
-                                          common_wheels,
-                                          'setuptools')
+    return _common_wheel_editable_install(tmpdir_factory, common_wheels, "setuptools")
 
 
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def wheel_install(tmpdir_factory, common_wheels):
-    return _common_wheel_editable_install(tmpdir_factory,
-                                          common_wheels,
-                                          'wheel')
+    return _common_wheel_editable_install(tmpdir_factory, common_wheels, "wheel")
 
 
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def coverage_install(tmpdir_factory, common_wheels):
-    return _common_wheel_editable_install(tmpdir_factory,
-                                          common_wheels,
-                                          'coverage')
+    return _common_wheel_editable_install(tmpdir_factory, common_wheels, "coverage")
 
 
 def install_egg_link(venv, project_name, egg_info_dir):
-    with open(venv.site / 'easy-install.pth', 'a') as fp:
-        fp.write(str(egg_info_dir.resolve()) + '\n')
-    with open(venv.site / (project_name + '.egg-link'), 'w') as fp:
-        fp.write(str(egg_info_dir) + '\n.')
+    with open(venv.site / "easy-install.pth", "a") as fp:
+        fp.write(str(egg_info_dir.resolve()) + "\n")
+    with open(venv.site / (project_name + ".egg-link"), "w") as fp:
+        fp.write(str(egg_info_dir) + "\n.")
 
 
-@pytest.fixture(scope='session')
-def virtualenv_template(request, tmpdir_factory, pip_src,
-                        setuptools_install, coverage_install):
+@pytest.fixture(scope="session")
+def virtualenv_template(
+    request, tmpdir_factory, pip_src, setuptools_install, coverage_install
+):
 
-    if request.config.getoption('--use-venv'):
-        venv_type = 'venv'
+    if request.config.getoption("--use-venv"):
+        venv_type = "venv"
     else:
-        venv_type = 'virtualenv'
+        venv_type = "virtualenv"
 
     # Create the virtual environment
-    tmpdir = Path(str(tmpdir_factory.mktemp('virtualenv')))
-    venv = VirtualEnvironment(
-        tmpdir.joinpath("venv_orig"), venv_type=venv_type
-    )
+    tmpdir = Path(str(tmpdir_factory.mktemp("virtualenv")))
+    venv = VirtualEnvironment(tmpdir.joinpath("venv_orig"), venv_type=venv_type)
 
     # Install setuptools and pip.
-    install_egg_link(venv, 'setuptools', setuptools_install)
-    pip_editable = Path(str(tmpdir_factory.mktemp('pip'))) / 'pip'
+    install_egg_link(venv, "setuptools", setuptools_install)
+    pip_editable = Path(str(tmpdir_factory.mktemp("pip"))) / "pip"
     shutil.copytree(pip_src, pip_editable, symlinks=True)
     # noxfile.py is Python 3 only
     assert compileall.compile_dir(
-        str(pip_editable), quiet=1, rx=re.compile("noxfile.py$"),
+        str(pip_editable),
+        quiet=1,
+        rx=re.compile("noxfile.py$"),
+    )
+    subprocess.check_call(
+        [venv.bin / "python", "setup.py", "-q", "develop"], cwd=pip_editable
     )
-    subprocess.check_call([venv.bin / 'python', 'setup.py', '-q', 'develop'],
-                          cwd=pip_editable)
 
     # Install coverage and pth file for executing it in any spawned processes
     # in this virtual environment.
-    install_egg_link(venv, 'coverage', coverage_install)
+    install_egg_link(venv, "coverage", coverage_install)
     # zz prefix ensures the file is after easy-install.pth.
-    with open(venv.site / 'zz-coverage-helper.pth', 'a') as f:
-        f.write('import coverage; coverage.process_startup()')
+    with open(venv.site / "zz-coverage-helper.pth", "a") as f:
+        f.write("import coverage; coverage.process_startup()")
 
     # Drop (non-relocatable) launchers.
     for exe in os.listdir(venv.bin):
         if not (
-            exe.startswith('python') or
-            exe.startswith('libpy')  # Don't remove libpypy-c.so...
+            exe.startswith("python")
+            or exe.startswith("libpy")  # Don't remove libpypy-c.so...
         ):
             (venv.bin / exe).unlink()
 
@@ -387,7 +401,7 @@ def virtualenv(virtualenv_factory, tmpdir):
 
 @pytest.fixture
 def with_wheel(virtualenv, wheel_install):
-    install_egg_link(virtualenv, 'wheel', wheel_install)
+    install_egg_link(virtualenv, "wheel", wheel_install)
 
 
 @pytest.fixture(scope="session")
@@ -398,21 +412,16 @@ def script_factory(virtualenv_factory, deprecated_python):
         return PipTestEnvironment(
             # The base location for our test environment
             tmpdir,
-
             # Tell the Test Environment where our virtualenv is located
             virtualenv=virtualenv,
-
             # Do not ignore hidden files, they need to be checked as well
             ignore_hidden=False,
-
             # We are starting with an already empty directory
             start_clear=False,
-
             # We want to ensure no temporary files are left behind, so the
             # PipTestEnvironment needs to capture and assert against temp
             capture_temp=True,
             assert_no_temp=True,
-
             # Deprecated python versions produce an extra deprecation warning
             pip_expect_warning=deprecated_python,
         )
@@ -434,7 +443,7 @@ def script(tmpdir, virtualenv, script_factory):
 @pytest.fixture(scope="session")
 def common_wheels():
     """Provide a directory with latest setuptools and wheel wheels"""
-    return DATA_DIR.joinpath('common_wheels')
+    return DATA_DIR.joinpath("common_wheels")
 
 
 @pytest.fixture(scope="session")
@@ -482,8 +491,7 @@ def deprecated_python():
 def cert_factory(tmpdir_factory):
     def factory():
         # type: () -> str
-        """Returns path to cert/key file.
-        """
+        """Returns path to cert/key file."""
         output_path = Path(str(tmpdir_factory.mktemp("certs"))) / "cert.pem"
         # Must be Text on PY2.
         cert, key = make_tls_cert("localhost")
@@ -537,14 +545,11 @@ class MockServer:
 
     def get_requests(self):
         # type: () -> Dict[str, str]
-        """Get environ for each received request.
-        """
+        """Get environ for each received request."""
         assert not self._running, "cannot get mock from running server"
         # Legacy: replace call[0][0] with call.args[0]
         # when pip drops support for python3.7
-        return [
-            call[0][0] for call in self._server.mock.call_args_list
-        ]
+        return [call[0][0] for call in self._server.mock.call_args_list]
 
 
 @pytest.fixture
@@ -558,8 +563,8 @@ def mock_server():
 @pytest.fixture
 def utc():
     # time.tzset() is not implemented on some platforms, e.g. Windows.
-    tzset = getattr(time, 'tzset', lambda: None)
-    with patch.dict(os.environ, {'TZ': 'UTC'}):
+    tzset = getattr(time, "tzset", lambda: None)
+    with patch.dict(os.environ, {"TZ": "UTC"}):
         tzset()
         yield
     tzset()
diff --git a/tests/data/packages/HackedEggInfo/setup.py b/tests/data/packages/HackedEggInfo/setup.py
index 171f5a2a3..9e872e0b5 100644
--- a/tests/data/packages/HackedEggInfo/setup.py
+++ b/tests/data/packages/HackedEggInfo/setup.py
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 from setuptools import setup
 from setuptools.command import egg_info as orig_egg_info
 
diff --git a/tests/data/packages/SetupPyUTF8/setup.py b/tests/data/packages/SetupPyUTF8/setup.py
index 9b65f5e79..1962a0060 100644
--- a/tests/data/packages/SetupPyUTF8/setup.py
+++ b/tests/data/packages/SetupPyUTF8/setup.py
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 from distutils.core import setup
 
 setup(name="SetupPyUTF8",
diff --git a/tests/data/src/chattymodule/setup.py b/tests/data/src/chattymodule/setup.py
index 01d772076..68099f2f8 100644
--- a/tests/data/src/chattymodule/setup.py
+++ b/tests/data/src/chattymodule/setup.py
@@ -5,7 +5,7 @@ import sys
 
 from setuptools import setup
 
-print("HELLO FROM CHATTYMODULE {sys.argv[1]}".format(**locals()))
+print(f"HELLO FROM CHATTYMODULE {sys.argv[1]}")
 print(os.environ)
 print(sys.argv)
 if "--fail" in sys.argv:
diff --git a/tests/data/src/prjwithdatafile/setup.py b/tests/data/src/prjwithdatafile/setup.py
index 94863b57b..240b7ea10 100755
--- a/tests/data/src/prjwithdatafile/setup.py
+++ b/tests/data/src/prjwithdatafile/setup.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 from setuptools import setup
 
 setup(
diff --git a/tests/functional/test_build_env.py b/tests/functional/test_build_env.py
index 7a392f426..b3f51a880 100644
--- a/tests/functional/test_build_env.py
+++ b/tests/functional/test_build_env.py
@@ -79,15 +79,15 @@ def test_build_env_allow_only_one_install(script):
     for prefix in ('normal', 'overlay'):
         build_env.install_requirements(
             finder, ['foo'], prefix,
-            'installing foo in {prefix}'.format(**locals()))
+            f'installing foo in {prefix}')
         with pytest.raises(AssertionError):
             build_env.install_requirements(
                 finder, ['bar'], prefix,
-                'installing bar in {prefix}'.format(**locals()))
+                f'installing bar in {prefix}')
         with pytest.raises(AssertionError):
             build_env.install_requirements(
                 finder, [], prefix,
-                'installing in {prefix}'.format(**locals()))
+                f'installing in {prefix}')
 
 
 def test_build_env_requirements_check(script):
@@ -201,7 +201,7 @@ def test_build_env_isolation(script):
             pass
         else:
             print(
-                'imported `pkg` from `{pkg.__file__}`'.format(**locals()),
+                f'imported `pkg` from `{pkg.__file__}`',
                 file=sys.stderr)
             print('system sites:\n  ' + '\n  '.join(sorted({
                           get_python_lib(plat_specific=0),
diff --git a/tests/functional/test_completion.py b/tests/functional/test_completion.py
index a3986811b..8a7464982 100644
--- a/tests/functional/test_completion.py
+++ b/tests/functional/test_completion.py
@@ -230,7 +230,7 @@ def test_completion_not_files_after_nonexpecting_option(
     (e.g. ``pip install``)
     """
     res, env = autocomplete(
-        words=('pip install {cl_opts} r'.format(**locals())),
+        words=(f'pip install {cl_opts} r'),
         cword='2',
         cwd=data.completion_paths,
     )
diff --git a/tests/functional/test_freeze.py b/tests/functional/test_freeze.py
index 5d3d49686..858e43931 100644
--- a/tests/functional/test_freeze.py
+++ b/tests/functional/test_freeze.py
@@ -5,6 +5,8 @@ import textwrap
 from doctest import ELLIPSIS, OutputChecker
 
 import pytest
+from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.pkg_resources import safe_name
 
 from tests.lib import (
     _create_test_package,
@@ -42,7 +44,7 @@ def _check_output(result, expected):
     actual = distribute_re.sub('', actual)
 
     def banner(msg):
-        return '\n========== {msg} ==========\n'.format(**locals())
+        return f'\n========== {msg} ==========\n'
 
     assert checker.check_output(expected, actual, ELLIPSIS), (
         banner('EXPECTED') + expected + banner('ACTUAL') + actual +
@@ -128,26 +130,23 @@ def test_freeze_with_invalid_names(script):
     )
     for pkgname in valid_pkgnames + invalid_pkgnames:
         fake_install(pkgname, script.site_packages_path)
-    result = script.pip('freeze', expect_stderr=True)
-    for pkgname in valid_pkgnames:
-        _check_output(
-            result.stdout,
-            '...{}==1.0...'.format(pkgname.replace('_', '-'))
-        )
-    for pkgname in invalid_pkgnames:
-        # Check that the full distribution repr is present.
-        dist_repr = '{} 1.0 ('.format(pkgname.replace('_', '-'))
-        expected = (
-            '...Could not generate requirement for '
-            'distribution {}...'.format(dist_repr)
-        )
-        _check_output(result.stderr, expected)
 
-    # Also check that the parse error details occur at least once.
-    # We only need to find one occurrence to know that exception details
-    # are logged.
-    expected = '...site-packages): Parse error at "...'
-    _check_output(result.stderr, expected)
+    result = script.pip('freeze', expect_stderr=True)
+
+    # Check all valid names are in the output.
+    output_lines = {line.strip() for line in result.stdout.splitlines()}
+    for name in valid_pkgnames:
+        assert f"{safe_name(name)}==1.0" in output_lines
+
+    # Check all invalid names are excluded from the output.
+    canonical_invalid_names = {canonicalize_name(n) for n in invalid_pkgnames}
+    for line in output_lines:
+        output_name, _, _ = line.partition("=")
+        assert canonicalize_name(output_name) not in canonical_invalid_names
+
+    # The invalid names should be logged.
+    for name in canonical_invalid_names:
+        assert f"Ignoring invalid distribution {name} (" in result.stderr
 
 
 @pytest.mark.git
@@ -272,7 +271,7 @@ def test_freeze_git_clone(script, tmpdir):
     _check_output(result.stdout, expected)
 
     result = script.pip(
-        'freeze', '-f', '{repo_dir}#egg=pip_test_package'.format(**locals()),
+        'freeze', '-f', f'{repo_dir}#egg=pip_test_package',
         expect_stderr=True,
     )
     expected = textwrap.dedent(
@@ -337,7 +336,7 @@ def test_freeze_git_clone_srcdir(script, tmpdir):
     _check_output(result.stdout, expected)
 
     result = script.pip(
-        'freeze', '-f', '{repo_dir}#egg=pip_test_package'.format(**locals()),
+        'freeze', '-f', f'{repo_dir}#egg=pip_test_package',
         expect_stderr=True,
     )
     expected = textwrap.dedent(
@@ -378,7 +377,7 @@ def test_freeze_mercurial_clone_srcdir(script, tmpdir):
     _check_output(result.stdout, expected)
 
     result = script.pip(
-        'freeze', '-f', '{repo_dir}#egg=pip_test_package'.format(**locals()),
+        'freeze', '-f', f'{repo_dir}#egg=pip_test_package',
         expect_stderr=True,
     )
     expected = textwrap.dedent(
@@ -473,7 +472,7 @@ def test_freeze_mercurial_clone(script, tmpdir):
     _check_output(result.stdout, expected)
 
     result = script.pip(
-        'freeze', '-f', '{repo_dir}#egg=pip_test_package'.format(**locals()),
+        'freeze', '-f', f'{repo_dir}#egg=pip_test_package',
         expect_stderr=True,
     )
     expected = textwrap.dedent(
@@ -513,7 +512,7 @@ def test_freeze_bazaar_clone(script, tmpdir):
 
     result = script.pip(
         'freeze', '-f',
-        '{checkout_path}/#egg=django-wikiapp'.format(**locals()),
+        f'{checkout_path}/#egg=django-wikiapp',
         expect_stderr=True,
     )
     expected = textwrap.dedent("""\
diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py
index 2cfb3d3d2..2742e873e 100644
--- a/tests/functional/test_install.py
+++ b/tests/functional/test_install.py
@@ -23,9 +23,7 @@ from tests.lib import (
     need_svn,
     path_to_url,
     pyversion,
-    pyversion_tuple,
     requirements_file,
-    windows_workaround_7667,
 )
 from tests.lib.filesystem import make_socket_file
 from tests.lib.local_repos import local_checkout
@@ -165,7 +163,6 @@ def test_pep518_with_namespace_package(script, data, common_wheels):
     )
 
 
-@pytest.mark.timeout(60)
 @pytest.mark.parametrize('command', ('install', 'wheel'))
 @pytest.mark.parametrize('package', ('pep518_forkbomb',
                                      'pep518_twin_forkbombs_first',
@@ -193,16 +190,10 @@ def test_pip_second_command_line_interface_works(
     """
     # Re-install pip so we get the launchers.
     script.pip_install_local('-f', common_wheels, pip_src)
-    # On old versions of Python, urllib3/requests will raise a warning about
-    # the lack of an SSLContext.
-    kwargs = {'expect_stderr': deprecated_python}
-    if pyversion_tuple < (2, 7, 9):
-        kwargs['expect_stderr'] = True
-
-    args = ['pip{pyversion}'.format(**globals())]
+    args = [f'pip{pyversion}']
     args.extend(['install', 'INITools==0.2'])
     args.extend(['-f', data.packages])
-    result = script.run(*args, **kwargs)
+    result = script.run(*args)
     dist_info_folder = (
         script.site_packages /
         'INITools-0.2.dist-info'
@@ -581,7 +572,29 @@ def test_install_from_local_directory_with_symlinks_to_directories(
     result.did_create(dist_info_folder)
 
 
-@pytest.mark.skipif("sys.platform == 'win32' or sys.version_info < (3,)")
+def test_install_from_local_directory_with_in_tree_build(
+    script, data, with_wheel
+):
+    """
+    Test installing from a local directory with --use-feature=in-tree-build.
+    """
+    to_install = data.packages.joinpath("FSPkg")
+    args = ["install", "--use-feature=in-tree-build", to_install]
+
+    in_tree_build_dir = to_install / "build"
+    assert not in_tree_build_dir.exists()
+    result = script.pip(*args)
+    fspkg_folder = script.site_packages / 'fspkg'
+    dist_info_folder = (
+        script.site_packages /
+        'FSPkg-0.1.dev0.dist-info'
+    )
+    result.did_create(fspkg_folder)
+    result.did_create(dist_info_folder)
+    assert in_tree_build_dir.exists()
+
+
+@pytest.mark.skipif("sys.platform == 'win32'")
 def test_install_from_local_directory_with_socket_file(
     script, data, tmpdir, with_wheel
 ):
@@ -627,9 +640,9 @@ def test_editable_install__local_dir_no_setup_py(
 
     msg = result.stderr
     if deprecated_python:
-        assert 'File "setup.py" not found. ' in msg
+        assert 'File "setup.py" or "setup.cfg" not found. ' in msg
     else:
-        assert msg.startswith('ERROR: File "setup.py" not found. ')
+        assert msg.startswith('ERROR: File "setup.py" or "setup.cfg" not found. ')
     assert 'pyproject.toml' not in msg
 
 
@@ -649,9 +662,9 @@ def test_editable_install__local_dir_no_setup_py_with_pyproject(
 
     msg = result.stderr
     if deprecated_python:
-        assert 'File "setup.py" not found. ' in msg
+        assert 'File "setup.py" or "setup.cfg" not found. ' in msg
     else:
-        assert msg.startswith('ERROR: File "setup.py" not found. ')
+        assert msg.startswith('ERROR: File "setup.py" or "setup.cfg" not found. ')
     assert 'A "pyproject.toml" file was found' in msg
 
 
@@ -727,11 +740,10 @@ def test_install_using_install_option_and_editable(script, tmpdir):
     """
     folder = 'script_folder'
     script.scratch_path.joinpath(folder).mkdir()
-    url = 'git+git://github.com/pypa/pip-test-package'
+    url = local_checkout('git+git://github.com/pypa/pip-test-package', tmpdir)
     result = script.pip(
-        'install', '-e', '{url}#egg=pip-test-package'
-        .format(url=local_checkout(url, tmpdir)),
-        '--install-option=--script-dir={folder}'.format(**locals()),
+        'install', '-e', f'{url}#egg=pip-test-package',
+        f'--install-option=--script-dir={folder}',
         expect_stderr=True)
     script_file = (
         script.venv / 'src' / 'pip-test-package' /
@@ -743,7 +755,6 @@ def test_install_using_install_option_and_editable(script, tmpdir):
 @pytest.mark.xfail
 @pytest.mark.network
 @need_mercurial
-@windows_workaround_7667
 def test_install_global_option_using_editable(script, tmpdir):
     """
     Test using global distutils options, but in an editable installation
@@ -799,10 +810,7 @@ def test_install_folder_using_slash_in_the_end(script, with_wheel):
     pkg_path = script.scratch_path / 'mock'
     pkg_path.joinpath("setup.py").write_text(mock100_setup_py)
     result = script.pip('install', 'mock' + os.path.sep)
-    dist_info_folder = (
-        script.site_packages /
-        'mock-100.1.dist-info'
-    )
+    dist_info_folder = script.site_packages / 'mock-100.1.dist-info'
     result.did_create(dist_info_folder)
 
 
@@ -815,10 +823,7 @@ def test_install_folder_using_relative_path(script, with_wheel):
     pkg_path = script.scratch_path / 'initools' / 'mock'
     pkg_path.joinpath("setup.py").write_text(mock100_setup_py)
     result = script.pip('install', Path('initools') / 'mock')
-    dist_info_folder = (
-        script.site_packages /
-        'mock-100.1.dist-info'.format(**globals())
-    )
+    dist_info_folder = script.site_packages / 'mock-100.1.dist-info'
     result.did_create(dist_info_folder)
 
 
@@ -1028,15 +1033,13 @@ def test_install_package_with_prefix(script, data):
     result.did_create(install_path)
 
 
-def test_install_editable_with_prefix(script):
+def _test_install_editable_with_prefix(script, files):
     # make a dummy project
     pkga_path = script.scratch_path / 'pkga'
     pkga_path.mkdir()
-    pkga_path.joinpath("setup.py").write_text(textwrap.dedent("""
-        from setuptools import setup
-        setup(name='pkga',
-              version='0.1')
-    """))
+
+    for fn, contents in files.items():
+        pkga_path.joinpath(fn).write_text(textwrap.dedent(contents))
 
     if hasattr(sys, "pypy_version_info"):
         site_packages = os.path.join(
@@ -1059,6 +1062,50 @@ def test_install_editable_with_prefix(script):
     result.did_create(install_path)
 
 
+@pytest.mark.network
+def test_install_editable_with_target(script):
+    pkg_path = script.scratch_path / 'pkg'
+    pkg_path.mkdir()
+    pkg_path.joinpath("setup.py").write_text(textwrap.dedent("""
+        from setuptools import setup
+        setup(
+            name='pkg',
+            install_requires=['watching_testrunner']
+        )
+    """))
+
+    target = script.scratch_path / 'target'
+    target.mkdir()
+    result = script.pip(
+        'install', '--editable', pkg_path, '--target', target
+    )
+
+    result.did_create(script.scratch / 'target' / 'pkg.egg-link')
+    result.did_create(script.scratch / 'target' / 'watching_testrunner.py')
+
+
+def test_install_editable_with_prefix_setup_py(script):
+    setup_py = """
+from setuptools import setup
+setup(name='pkga', version='0.1')
+"""
+    _test_install_editable_with_prefix(script, {"setup.py": setup_py})
+
+
+def test_install_editable_with_prefix_setup_cfg(script):
+    setup_cfg = """[metadata]
+name = pkga
+version = 0.1
+"""
+    pyproject_toml = """[build-system]
+requires = ["setuptools", "wheel"]
+build-backend = "setuptools.build_meta"
+"""
+    _test_install_editable_with_prefix(
+        script, {"setup.cfg": setup_cfg, "pyproject.toml": pyproject_toml}
+    )
+
+
 def test_install_package_conflict_prefix_and_user(script, data):
     """
     Test installing a package using pip install --prefix --user errors out
@@ -1294,11 +1341,11 @@ def test_install_subprocess_output_handling(script, data):
 def test_install_log(script, data, tmpdir):
     # test that verbose logs go to "--log" file
     f = tmpdir.joinpath("log.txt")
-    args = ['--log={f}'.format(**locals()),
+    args = [f'--log={f}',
             'install', data.src.joinpath('chattymodule')]
     result = script.pip(*args)
     assert 0 == result.stdout.count("HELLO FROM CHATTYMODULE")
-    with open(f, 'r') as fp:
+    with open(f) as fp:
         # one from egg_info, one from install
         assert 2 == fp.read().count("HELLO FROM CHATTYMODULE")
 
@@ -1321,7 +1368,8 @@ def test_cleanup_after_failed_wheel(script, with_wheel):
     # One of the effects of not cleaning up is broken scripts:
     script_py = script.bin_path / "script.py"
     assert script_py.exists(), script_py
-    shebang = open(script_py, 'r').readline().strip()
+    with open(script_py) as f:
+        shebang = f.readline().strip()
     assert shebang != '#!python', shebang
     # OK, assert that we *said* we were cleaning up:
     # /!\ if in need to change this, also change test_pep517_no_legacy_cleanup
@@ -1392,7 +1440,6 @@ def test_install_no_binary_disables_building_wheels(script, data, with_wheel):
 
 
 @pytest.mark.network
-@windows_workaround_7667
 def test_install_no_binary_builds_pep_517_wheel(script, data, with_wheel):
     to_install = data.packages.joinpath('pep517_setup_and_pyproject')
     res = script.pip(
@@ -1407,7 +1454,6 @@ def test_install_no_binary_builds_pep_517_wheel(script, data, with_wheel):
 
 
 @pytest.mark.network
-@windows_workaround_7667
 def test_install_no_binary_uses_local_backend(
         script, data, with_wheel, tmpdir):
     to_install = data.packages.joinpath('pep517_wrapper_buildsys')
@@ -1448,7 +1494,7 @@ def test_install_editable_with_wrong_egg_name(script, resolver_variant):
     """))
     result = script.pip(
         'install', '--editable',
-        'file://{pkga_path}#egg=pkgb'.format(**locals()),
+        f'file://{pkga_path}#egg=pkgb',
         expect_error=(resolver_variant == "2020-resolver"),
     )
     assert ("Generating metadata for package pkgb produced metadata "
@@ -1534,7 +1580,7 @@ def test_install_incompatible_python_requires_editable(script):
     """))
     result = script.pip(
         'install',
-        '--editable={pkga_path}'.format(**locals()),
+        f'--editable={pkga_path}',
         expect_error=True)
     assert _get_expected_error_text() in result.stderr, str(result)
 
@@ -1651,7 +1697,7 @@ def test_installed_files_recorded_in_deterministic_order(script, data):
     to_install = data.packages.joinpath("FSPkg")
     result = script.pip('install', to_install)
     fspkg_folder = script.site_packages / 'fspkg'
-    egg_info = 'FSPkg-0.1.dev0-py{pyversion}.egg-info'.format(**globals())
+    egg_info = f'FSPkg-0.1.dev0-py{pyversion}.egg-info'
     installed_files_path = (
         script.site_packages / egg_info / 'installed-files.txt'
     )
@@ -1714,10 +1760,10 @@ def test_target_install_ignores_distutils_config_install_prefix(script):
                             'pydistutils.cfg' if sys.platform == 'win32'
                             else '.pydistutils.cfg')
     distutils_config.write_text(textwrap.dedent(
-        '''
+        f'''
         [install]
         prefix={prefix}
-        '''.format(**locals())))
+        '''))
     target = script.scratch_path / 'target'
     result = script.pip_install_local('simplewheel', '-t', target)
 
diff --git a/tests/functional/test_install_compat.py b/tests/functional/test_install_compat.py
index a5a0df652..44b9b290e 100644
--- a/tests/functional/test_install_compat.py
+++ b/tests/functional/test_install_compat.py
@@ -26,7 +26,7 @@ def test_debian_egg_name_workaround(script):
 
     egg_info = os.path.join(
         script.site_packages,
-        "INITools-0.2-py{pyversion}.egg-info".format(**globals()))
+        f"INITools-0.2-py{pyversion}.egg-info")
 
     # Debian only removes pyversion for global installs, not inside a venv
     # so even if this test runs on a Debian/Ubuntu system with broken
@@ -34,14 +34,14 @@ def test_debian_egg_name_workaround(script):
     # .egg-info
     result.did_create(
         egg_info,
-        message="Couldn't find {egg_info}".format(**locals())
+        message=f"Couldn't find {egg_info}"
     )
 
     # The Debian no-pyversion version of the .egg-info
     mangled = os.path.join(script.site_packages, "INITools-0.2.egg-info")
     result.did_not_create(
         mangled,
-        message="Found unexpected {mangled}".format(**locals())
+        message=f"Found unexpected {mangled}"
     )
 
     # Simulate a Debian install by copying the .egg-info to their name for it
diff --git a/tests/functional/test_install_config.py b/tests/functional/test_install_config.py
index 27e4f0b0c..59aec65ff 100644
--- a/tests/functional/test_install_config.py
+++ b/tests/functional/test_install_config.py
@@ -112,7 +112,7 @@ def test_command_line_appends_correctly(script, data):
 
     """
     script.environ['PIP_FIND_LINKS'] = (
-        'https://test.pypi.org {data.find_links}'.format(**locals())
+        f'https://test.pypi.org {data.find_links}'
     )
     result = script.pip(
         'install', '-vvv', 'INITools', '--trusted-host',
@@ -298,7 +298,7 @@ def test_prompt_for_keyring_if_needed(script, data, cert_factory, auth_needed):
         response(str(data.packages / "simple-3.0.tar.gz")),
     ]
 
-    url = "https://{}:{}/simple".format(server.host, server.port)
+    url = f"https://{server.host}:{server.port}/simple"
 
     keyring_content = textwrap.dedent("""\
         import os
diff --git a/tests/functional/test_install_extras.py b/tests/functional/test_install_extras.py
index e96010072..de1ee3795 100644
--- a/tests/functional/test_install_extras.py
+++ b/tests/functional/test_install_extras.py
@@ -136,7 +136,7 @@ def test_install_special_extra(script):
     """))
 
     result = script.pip(
-        'install', '--no-index', '{pkga_path}[Hop_hOp-hoP]'.format(**locals()),
+        'install', '--no-index', f'{pkga_path}[Hop_hOp-hoP]',
         expect_error=True)
     assert (
         "Could not find a version that satisfies the requirement missing_pkg"
@@ -165,7 +165,7 @@ def test_install_extra_merging(script, data, extra_to_install, simple_version):
     """))
 
     result = script.pip_install_local(
-        '{pkga_path}{extra_to_install}'.format(**locals()),
+        f'{pkga_path}{extra_to_install}',
     )
 
     assert f'Successfully installed pkga-0.1 simple-{simple_version}' in result.stdout
diff --git a/tests/functional/test_install_reqs.py b/tests/functional/test_install_reqs.py
index 9c35aee83..d559e94be 100644
--- a/tests/functional/test_install_reqs.py
+++ b/tests/functional/test_install_reqs.py
@@ -68,11 +68,11 @@ def test_requirements_file(script, with_wheel):
 
     """
     other_lib_name, other_lib_version = 'anyjson', '0.3'
-    script.scratch_path.joinpath("initools-req.txt").write_text(textwrap.dedent("""\
+    script.scratch_path.joinpath("initools-req.txt").write_text(textwrap.dedent(f"""\
         INITools==0.2
         # and something else to test out:
         {other_lib_name}<={other_lib_version}
-        """.format(**locals())))
+        """))
     result = script.pip(
         'install', '-r', script.scratch_path / 'initools-req.txt'
     )
@@ -178,15 +178,14 @@ def test_multiple_requirements_files(script, tmpdir, with_wheel):
             other_lib_name
         ),
     )
-    script.scratch_path.joinpath(
-        "{other_lib_name}-req.txt".format(**locals())).write_text(
-            "{other_lib_name}<={other_lib_version}".format(**locals())
+    script.scratch_path.joinpath(f"{other_lib_name}-req.txt").write_text(
+            f"{other_lib_name}<={other_lib_version}"
     )
     result = script.pip(
         'install', '-r', script.scratch_path / 'initools-req.txt'
     )
     assert result.files_created[script.site_packages / other_lib_name].dir
-    fn = '{other_lib_name}-{other_lib_version}.dist-info'.format(**locals())
+    fn = f'{other_lib_name}-{other_lib_version}.dist-info'
     assert result.files_created[script.site_packages / fn].dir
     result.did_create(script.venv / 'src' / 'initools')
 
@@ -295,9 +294,9 @@ def test_wheel_user_with_prefix_in_pydistutils_cfg(
     user_cfg = os.path.join(os.path.expanduser('~'), user_filename)
     script.scratch_path.joinpath("bin").mkdir()
     with open(user_cfg, "w") as cfg:
-        cfg.write(textwrap.dedent("""
+        cfg.write(textwrap.dedent(f"""
             [install]
-            prefix={script.scratch_path}""".format(**locals())))
+            prefix={script.scratch_path}"""))
 
     result = script.pip(
         'install', '--user', '--no-index',
@@ -358,7 +357,7 @@ def test_constraints_local_editable_install_causes_error(
         assert 'Could not satisfy constraints' in result.stderr, str(result)
     else:
         # Because singlemodule only has 0.0.1 available.
-        assert 'No matching distribution found' in result.stderr, str(result)
+        assert 'Cannot install singlemodule 0.0.1' in result.stderr, str(result)
 
 
 @pytest.mark.network
@@ -387,7 +386,7 @@ def test_constraints_local_install_causes_error(
         assert 'Could not satisfy constraints' in result.stderr, str(result)
     else:
         # Because singlemodule only has 0.0.1 available.
-        assert 'No matching distribution found' in result.stderr, str(result)
+        assert 'Cannot install singlemodule 0.0.1' in result.stderr, str(result)
 
 
 def test_constraints_constrain_to_local_editable(
@@ -559,8 +558,7 @@ def test_install_distribution_duplicate_extras(script, data):
     package_name = to_install + "[bar]"
     with pytest.raises(AssertionError):
         result = script.pip_install_local(package_name, package_name)
-        expected = (
-            'Double requirement given: {package_name}'.format(**locals()))
+        expected = f'Double requirement given: {package_name}'
         assert expected in result.stderr
 
 
@@ -571,7 +569,7 @@ def test_install_distribution_union_with_constraints(
 ):
     to_install = data.packages.joinpath("LocalExtras")
     script.scratch_path.joinpath("constraints.txt").write_text(
-        "{to_install}[bar]".format(**locals()))
+        f"{to_install}[bar]")
     result = script.pip_install_local(
         '-c', script.scratch_path / 'constraints.txt', to_install + '[baz]',
         allow_stderr_warning=True,
@@ -647,9 +645,7 @@ def test_install_unsupported_wheel_file(script, data):
     # Trying to install a local wheel with an incompatible version/type
     # should fail.
     path = data.packages.joinpath("simple.dist-0.1-py1-none-invalid.whl")
-    script.scratch_path.joinpath("wheel-file.txt").write_text(textwrap.dedent("""\
-        {path}
-        """.format(**locals())))
+    script.scratch_path.joinpath("wheel-file.txt").write_text(path + '\n')
     result = script.pip(
         'install', '-r', script.scratch_path / 'wheel-file.txt',
         expect_error=True,
diff --git a/tests/functional/test_install_upgrade.py b/tests/functional/test_install_upgrade.py
index 46aac8f9d..d7586cd58 100644
--- a/tests/functional/test_install_upgrade.py
+++ b/tests/functional/test_install_upgrade.py
@@ -421,8 +421,7 @@ class TestUpgradeDistributeToSetuptools:
 
     def prep_ve(self, script, version, pip_src, distribute=False):
         self.script = script
-        self.script.pip_install_local(
-            'virtualenv=={version}'.format(**locals()))
+        self.script.pip_install_local(f'virtualenv=={version}')
         args = ['virtualenv', self.script.scratch_path / 'VE']
         if distribute:
             args.insert(1, '--distribute')
diff --git a/tests/functional/test_install_user.py b/tests/functional/test_install_user.py
index c5d7acced..538556ed9 100644
--- a/tests/functional/test_install_user.py
+++ b/tests/functional/test_install_user.py
@@ -118,8 +118,7 @@ class Tests_UserSite:
         # usersite has 0.1
         # we still test for egg-info because no-binary implies setup.py install
         egg_info_folder = (
-            script.user_site /
-            'INITools-0.1-py{pyversion}.egg-info'.format(**globals())
+            script.user_site / f'INITools-0.1-py{pyversion}.egg-info'
         )
         initools_v3_file = (
             # file only in 0.3
@@ -146,8 +145,7 @@ class Tests_UserSite:
         # usersite has 0.1
         # we still test for egg-info because no-binary implies setup.py install
         egg_info_folder = (
-            script.user_site /
-            'INITools-0.1-py{pyversion}.egg-info'.format(**globals())
+            script.user_site / f'INITools-0.1-py{pyversion}.egg-info'
         )
         initools_folder = script.user_site / 'initools'
         result2.did_create(egg_info_folder)
@@ -156,7 +154,7 @@ class Tests_UserSite:
         # site still has 0.2 (can't look in result1; have to check)
         egg_info_folder = (
             script.base_path / script.site_packages /
-            'INITools-0.2-py{pyversion}.egg-info'.format(**globals())
+            f'INITools-0.2-py{pyversion}.egg-info'
         )
         initools_folder = script.base_path / script.site_packages / 'initools'
         assert isdir(egg_info_folder)
@@ -178,8 +176,7 @@ class Tests_UserSite:
         # usersite has 0.3.1
         # we still test for egg-info because no-binary implies setup.py install
         egg_info_folder = (
-            script.user_site /
-            'INITools-0.3.1-py{pyversion}.egg-info'.format(**globals())
+            script.user_site / f'INITools-0.3.1-py{pyversion}.egg-info'
         )
         initools_folder = script.user_site / 'initools'
         result2.did_create(egg_info_folder)
@@ -188,7 +185,7 @@ class Tests_UserSite:
         # site still has 0.2 (can't look in result1; have to check)
         egg_info_folder = (
             script.base_path / script.site_packages /
-            'INITools-0.2-py{pyversion}.egg-info'.format(**globals())
+            f'INITools-0.2-py{pyversion}.egg-info'
         )
         initools_folder = script.base_path / script.site_packages / 'initools'
         assert isdir(egg_info_folder), result2.stdout
@@ -213,8 +210,7 @@ class Tests_UserSite:
         # usersite has 0.1
         # we still test for egg-info because no-binary implies setup.py install
         egg_info_folder = (
-            script.user_site /
-            'INITools-0.1-py{pyversion}.egg-info'.format(**globals())
+            script.user_site / f'INITools-0.1-py{pyversion}.egg-info'
         )
         initools_v3_file = (
             # file only in 0.3
@@ -227,7 +223,7 @@ class Tests_UserSite:
         # site still has 0.2 (can't just look in result1; have to check)
         egg_info_folder = (
             script.base_path / script.site_packages /
-            'INITools-0.2-py{pyversion}.egg-info'.format(**globals())
+            f'INITools-0.2-py{pyversion}.egg-info'
         )
         initools_folder = script.base_path / script.site_packages / 'initools'
         assert isdir(egg_info_folder)
diff --git a/tests/functional/test_new_resolver.py b/tests/functional/test_new_resolver.py
index 4d2acbb23..6ed5776ee 100644
--- a/tests/functional/test_new_resolver.py
+++ b/tests/functional/test_new_resolver.py
@@ -16,23 +16,23 @@ from tests.lib.wheel import make_wheel
 
 def assert_installed(script, **kwargs):
     ret = script.pip('list', '--format=json')
-    installed = set(
+    installed = {
         (canonicalize_name(val['name']), val['version'])
         for val in json.loads(ret.stdout)
-    )
-    expected = set((canonicalize_name(k), v) for k, v in kwargs.items())
+    }
+    expected = {(canonicalize_name(k), v) for k, v in kwargs.items()}
     assert expected <= installed, f"{expected!r} not all in {installed!r}"
 
 
 def assert_not_installed(script, *args):
     ret = script.pip("list", "--format=json")
-    installed = set(
+    installed = {
         canonicalize_name(val["name"])
         for val in json.loads(ret.stdout)
-    )
+    }
     # None of the given names should be listed as installed, i.e. their
     # intersection should be empty.
-    expected = set(canonicalize_name(k) for k in args)
+    expected = {canonicalize_name(k) for k in args}
     assert not (expected & installed), f"{expected!r} contained in {installed!r}"
 
 
@@ -40,7 +40,7 @@ def assert_editable(script, *args):
     # This simply checks whether all of the listed packages have a
     # corresponding .egg-link file installed.
     # TODO: Implement a more rigorous way to test for editable installations.
-    egg_links = set(f"{arg}.egg-link" for arg in args)
+    egg_links = {f"{arg}.egg-link" for arg in args}
     assert egg_links <= set(os.listdir(script.site_packages_path)), \
         f"{args!r} not all found in {script.site_packages_path!r}"
 
@@ -687,7 +687,7 @@ def test_new_resolver_constraint_on_dependency(script):
 @pytest.mark.parametrize(
     "constraint_version, expect_error, message",
     [
-        ("1.0", True, "ERROR: No matching distribution found for foo 2.0"),
+        ("1.0", True, "Cannot install foo 2.0"),
         ("2.0", False, "Successfully installed foo-2.0"),
     ],
 )
diff --git a/tests/functional/test_new_resolver_errors.py b/tests/functional/test_new_resolver_errors.py
index 830acc764..b4d63a996 100644
--- a/tests/functional/test_new_resolver_errors.py
+++ b/tests/functional/test_new_resolver_errors.py
@@ -1,4 +1,6 @@
-from tests.lib import create_basic_wheel_for_package
+import sys
+
+from tests.lib import create_basic_wheel_for_package, create_test_package_with_setup
 
 
 def test_new_resolver_conflict_requirements_file(tmpdir, script):
@@ -24,3 +26,50 @@ def test_new_resolver_conflict_requirements_file(tmpdir, script):
 
     message = "package versions have conflicting dependencies"
     assert message in result.stderr, str(result)
+
+
+def test_new_resolver_conflict_constraints_file(tmpdir, script):
+    create_basic_wheel_for_package(script, "pkg", "1.0")
+
+    constraints_file = tmpdir.joinpath("constraints.txt")
+    constraints_file.write_text("pkg!=1.0")
+
+    result = script.pip(
+        "install",
+        "--no-cache-dir", "--no-index",
+        "--find-links", script.scratch_path,
+        "-c", constrats_file,
+        "pkg==1.0",
+        expect_error=True,
+    )
+
+    assert "ResolutionImpossible" in result.stderr, str(result)
+
+    message = "The user requested (constraint) pkg!=1.0"
+    assert message in result.stdout, str(result)
+
+
+def test_new_resolver_requires_python_error(script):
+    compatible_python = ">={0.major}.{0.minor}".format(sys.version_info)
+    incompatible_python = "<{0.major}.{0.minor}".format(sys.version_info)
+
+    pkga = create_test_package_with_setup(
+        script,
+        name="pkga",
+        version="1.0",
+        python_requires=compatible_python,
+    )
+    pkgb = create_test_package_with_setup(
+        script,
+        name="pkgb",
+        version="1.0",
+        python_requires=incompatible_python,
+    )
+
+    # This always fails because pkgb can never be satisfied.
+    result = script.pip("install", "--no-index", pkga, pkgb, expect_error=True)
+
+    # The error message should mention the Requires-Python: value causing the
+    # conflict, not the compatible one.
+    assert incompatible_python in result.stderr, str(result)
+    assert compatible_python not in result.stderr, str(result)
diff --git a/tests/functional/test_no_color.py b/tests/functional/test_no_color.py
index 48ed3ff78..3fd943f93 100644
--- a/tests/functional/test_no_color.py
+++ b/tests/functional/test_no_color.py
@@ -33,7 +33,7 @@ def test_no_color(script):
             pytest.skip("Unable to capture output using script: " + cmd)
 
         try:
-            with open("/tmp/pip-test-no-color.txt", "r") as output_file:
+            with open("/tmp/pip-test-no-color.txt") as output_file:
                 retval = output_file.read()
             return retval
         finally:
diff --git a/tests/functional/test_pep517.py b/tests/functional/test_pep517.py
index bcad47936..a747b8a07 100644
--- a/tests/functional/test_pep517.py
+++ b/tests/functional/test_pep517.py
@@ -3,7 +3,7 @@ from pip._vendor import toml
 
 from pip._internal.build_env import BuildEnvironment
 from pip._internal.req import InstallRequirement
-from tests.lib import make_test_finder, path_to_url, windows_workaround_7667
+from tests.lib import make_test_finder, path_to_url
 
 
 def make_project(tmpdir, requires=None, backend=None, backend_path=None):
@@ -255,7 +255,6 @@ def test_explicit_setuptools_backend(script, tmpdir, data, common_wheels):
 
 
 @pytest.mark.network
-@windows_workaround_7667
 def test_pep517_and_build_options(script, tmpdir, data, common_wheels):
     """Backend generated requirements are installed in the build env"""
     project_dir, name = make_pyproject_with_setup(tmpdir)
diff --git a/tests/functional/test_uninstall_user.py b/tests/functional/test_uninstall_user.py
index 2dbf032ac..7a0006d47 100644
--- a/tests/functional/test_uninstall_user.py
+++ b/tests/functional/test_uninstall_user.py
@@ -46,7 +46,7 @@ class Tests_UninstallUserSite:
         # keep checking for egg-info because no-binary implies setup.py install
         egg_info_folder = (
             script.base_path / script.site_packages /
-            'pip_test_package-0.1-py{pyversion}.egg-info'.format(**globals())
+            f'pip_test_package-0.1-py{pyversion}.egg-info'
         )
         assert isdir(egg_info_folder)
 
diff --git a/tests/functional/test_vcs_bazaar.py b/tests/functional/test_vcs_bazaar.py
index ad24d73d5..57fee51e7 100644
--- a/tests/functional/test_vcs_bazaar.py
+++ b/tests/functional/test_vcs_bazaar.py
@@ -64,7 +64,7 @@ def test_export_rev(script, tmpdir):
     url = hide_url('bzr+' + _test_path_to_file_url(source_dir) + '@1')
     Bazaar().export(str(export_dir), url=url)
 
-    with open(export_dir / 'test_file', 'r') as f:
+    with open(export_dir / 'test_file') as f:
         assert f.read() == 'something initial'
 
 
diff --git a/tests/functional/test_wheel.py b/tests/functional/test_wheel.py
index c5e168039..da040c307 100644
--- a/tests/functional/test_wheel.py
+++ b/tests/functional/test_wheel.py
@@ -49,8 +49,7 @@ def test_pip_wheel_success(script, data):
         'wheel', '--no-index', '-f', data.find_links,
         'simple==3.0',
     )
-    wheel_file_name = 'simple-3.0-py{pyversion[0]}-none-any.whl' \
-        .format(**globals())
+    wheel_file_name = f'simple-3.0-py{pyversion[0]}-none-any.whl'
     wheel_file_path = script.scratch / wheel_file_name
     assert re.search(
         r"Created wheel for simple: "
@@ -70,8 +69,7 @@ def test_pip_wheel_build_cache(script, data):
         'wheel', '--no-index', '-f', data.find_links,
         'simple==3.0',
     )
-    wheel_file_name = 'simple-3.0-py{pyversion[0]}-none-any.whl' \
-        .format(**globals())
+    wheel_file_name = f'simple-3.0-py{pyversion[0]}-none-any.whl'
     wheel_file_path = script.scratch / wheel_file_name
     result.did_create(wheel_file_path)
     assert "Successfully built simple" in result.stdout, result.stdout
@@ -148,8 +146,7 @@ def test_pip_wheel_builds_editable_deps(script, data):
         'wheel', '--no-index', '-f', data.find_links,
         '-e', editable_path
     )
-    wheel_file_name = 'simple-1.0-py{pyversion[0]}-none-any.whl' \
-        .format(**globals())
+    wheel_file_name = f'simple-1.0-py{pyversion[0]}-none-any.whl'
     wheel_file_path = script.scratch / wheel_file_name
     result.did_create(wheel_file_path)
 
@@ -163,8 +160,7 @@ def test_pip_wheel_builds_editable(script, data):
         'wheel', '--no-index', '-f', data.find_links,
         '-e', editable_path
     )
-    wheel_file_name = 'simplewheel-1.0-py{pyversion[0]}-none-any.whl' \
-        .format(**globals())
+    wheel_file_name = f'simplewheel-1.0-py{pyversion[0]}-none-any.whl'
     wheel_file_path = script.scratch / wheel_file_name
     result.did_create(wheel_file_path)
 
@@ -213,8 +209,7 @@ def test_pip_wheel_fail(script, data):
         'wheelbroken==0.1',
         expect_error=True,
     )
-    wheel_file_name = 'wheelbroken-0.1-py{pyversion[0]}-none-any.whl' \
-        .format(**globals())
+    wheel_file_name = f'wheelbroken-0.1-py{pyversion[0]}-none-any.whl'
     wheel_file_path = script.scratch / wheel_file_name
     result.did_not_create(wheel_file_path)
     assert "FakeError" in result.stderr, result.stderr
@@ -236,7 +231,7 @@ def test_no_clean_option_blocks_cleaning_after_wheel(
     build = script.venv_path / 'build'
     result = script.pip(
         'wheel', '--no-clean', '--no-index', '--build', build,
-        '--find-links={data.find_links}'.format(**locals()),
+        f'--find-links={data.find_links}',
         'simple',
         expect_temp=True,
         # TODO: allow_stderr_warning is used for the --build deprecation,
@@ -260,8 +255,7 @@ def test_pip_wheel_source_deps(script, data):
         'wheel', '--no-index', '-f', data.find_links,
         'requires_source',
     )
-    wheel_file_name = 'source-1.0-py{pyversion[0]}-none-any.whl' \
-        .format(**globals())
+    wheel_file_name = f'source-1.0-py{pyversion[0]}-none-any.whl'
     wheel_file_path = script.scratch / wheel_file_name
     result.did_create(wheel_file_path)
     assert "Successfully built source" in result.stdout, result.stdout
@@ -278,8 +272,7 @@ def test_wheel_package_with_latin1_setup(script, data):
 def test_pip_wheel_with_pep518_build_reqs(script, data, common_wheels):
     result = script.pip('wheel', '--no-index', '-f', data.find_links,
                         '-f', common_wheels, 'pep518==3.0',)
-    wheel_file_name = 'pep518-3.0-py{pyversion[0]}-none-any.whl' \
-        .format(**globals())
+    wheel_file_name = f'pep518-3.0-py{pyversion[0]}-none-any.whl'
     wheel_file_path = script.scratch / wheel_file_name
     result.did_create(wheel_file_path)
     assert "Successfully built pep518" in result.stdout, result.stdout
@@ -292,8 +285,7 @@ def test_pip_wheel_with_pep518_build_reqs_no_isolation(script, data):
         'wheel', '--no-index', '-f', data.find_links,
         '--no-build-isolation', 'pep518==3.0',
     )
-    wheel_file_name = 'pep518-3.0-py{pyversion[0]}-none-any.whl' \
-        .format(**globals())
+    wheel_file_name = f'pep518-3.0-py{pyversion[0]}-none-any.whl'
     wheel_file_path = script.scratch / wheel_file_name
     result.did_create(wheel_file_path)
     assert "Successfully built pep518" in result.stdout, result.stdout
@@ -339,8 +331,7 @@ def test_pep517_wheels_are_not_confused_with_other_files(script, tmpdir, data):
     result = script.pip('wheel', pkg_to_wheel, '-w', script.scratch_path)
     assert "Installing build dependencies" in result.stdout, result.stdout
 
-    wheel_file_name = 'withpyproject-0.0.1-py{pyversion[0]}-none-any.whl' \
-        .format(**globals())
+    wheel_file_name = f'withpyproject-0.0.1-py{pyversion[0]}-none-any.whl'
     wheel_file_path = script.scratch / wheel_file_name
     result.did_create(wheel_file_path)
 
@@ -354,7 +345,6 @@ def test_legacy_wheels_are_not_confused_with_other_files(script, tmpdir, data):
     result = script.pip('wheel', pkg_to_wheel, '-w', script.scratch_path)
     assert "Installing build dependencies" not in result.stdout, result.stdout
 
-    wheel_file_name = 'simplewheel-1.0-py{pyversion[0]}-none-any.whl' \
-        .format(**globals())
+    wheel_file_name = f'simplewheel-1.0-py{pyversion[0]}-none-any.whl'
     wheel_file_path = script.scratch / wheel_file_name
     result.did_create(wheel_file_path)
diff --git a/tests/functional/test_yaml.py b/tests/functional/test_yaml.py
deleted file mode 100644
index ba7b17531..000000000
--- a/tests/functional/test_yaml.py
+++ /dev/null
@@ -1,203 +0,0 @@
-"""
-Tests for the resolver
-"""
-
-import os
-import re
-import sys
-
-import pytest
-import yaml
-
-from tests.lib import DATA_DIR, create_basic_wheel_for_package, path_to_url
-
-
-def generate_yaml_tests(directory):
-    """
-    Generate yaml test cases from the yaml files in the given directory
-    """
-    for yml_file in directory.glob("*.yml"):
-        data = yaml.safe_load(yml_file.read_text())
-        assert "cases" in data, "A fixture needs cases to be used in testing"
-
-        # Strip the parts of the directory to only get a name without
-        # extension and resolver directory
-        base_name = str(yml_file)[len(str(directory)) + 1:-4]
-
-        base = data.get("base", {})
-        cases = data["cases"]
-
-        for resolver in 'legacy', '2020-resolver':
-            for i, case_template in enumerate(cases):
-                case = base.copy()
-                case.update(case_template)
-
-                case[":name:"] = base_name
-                if len(cases) > 1:
-                    case[":name:"] += "-" + str(i)
-                case[":name:"] += "*" + resolver
-                case[":resolver:"] = resolver
-
-                skip = case.pop("skip", False)
-                assert skip in [False, True, 'legacy', '2020-resolver']
-                if skip is True or skip == resolver:
-                    case = pytest.param(case, marks=pytest.mark.xfail)
-
-                yield case
-
-
-def id_func(param):
-    """
-    Give a nice parameter name to the generated function parameters
-    """
-    if isinstance(param, dict) and ":name:" in param:
-        return param[":name:"]
-
-    retval = str(param)
-    if len(retval) > 25:
-        retval = retval[:20] + "..." + retval[-2:]
-    return retval
-
-
-def convert_to_dict(string):
-
-    def stripping_split(my_str, splitwith, count=None):
-        if count is None:
-            return [x.strip() for x in my_str.strip().split(splitwith)]
-        else:
-            return [x.strip() for x in my_str.strip().split(splitwith, count)]
-
-    parts = stripping_split(string, ";")
-
-    retval = {}
-    retval["depends"] = []
-    retval["extras"] = {}
-
-    retval["name"], retval["version"] = stripping_split(parts[0], " ")
-
-    for part in parts[1:]:
-        verb, args_str = stripping_split(part, " ", 1)
-        assert verb in ["depends"], f"Unknown verb {verb!r}"
-
-        retval[verb] = stripping_split(args_str, ",")
-
-    return retval
-
-
-def handle_request(script, action, requirement, options, resolver_variant):
-    if action == 'install':
-        args = ['install']
-        if resolver_variant == "legacy":
-            args.append("--use-deprecated=legacy-resolver")
-        args.extend(["--no-index", "--find-links",
-                     path_to_url(script.scratch_path)])
-    elif action == 'uninstall':
-        args = ['uninstall', '--yes']
-    else:
-        raise f"Did not excpet action: {action!r}"
-
-    if isinstance(requirement, str):
-        args.append(requirement)
-    elif isinstance(requirement, list):
-        args.extend(requirement)
-    else:
-        raise f"requirement neither str nor list {requirement!r}"
-
-    args.extend(options)
-    args.append("--verbose")
-
-    result = script.pip(*args,
-                        allow_stderr_error=True,
-                        allow_stderr_warning=True,
-                        allow_error=True)
-
-    # Check which packages got installed
-    state = []
-    for path in os.listdir(script.site_packages_path):
-        if path.endswith(".dist-info"):
-            name, version = (
-                os.path.basename(path)[:-len(".dist-info")]
-            ).rsplit("-", 1)
-            # TODO: information about extras.
-            state.append(" ".join((name, version)))
-
-    return {"result": result, "state": sorted(state)}
-
-
-def check_error(error, result):
-    return_code = error.get('code')
-    if return_code:
-        assert result.returncode == return_code
-
-    stderr = error.get('stderr')
-    if not stderr:
-        return
-
-    if isinstance(stderr, str):
-        patters = [stderr]
-    elif isinstance(stderr, list):
-        patters = stderr
-    else:
-        raise "string or list expected, found %r" % stderr
-
-    for patter in patters:
-        match = re.search(patter, result.stderr, re.I)
-        assert match, 'regex %r not found in stderr: %r' % (
-            stderr, result.stderr)
-
-
-@pytest.mark.yaml
-@pytest.mark.parametrize(
-    "case", generate_yaml_tests(DATA_DIR.parent / "yaml"), ids=id_func
-)
-def test_yaml_based(script, case):
-    available = case.get("available", [])
-    requests = case.get("request", [])
-    responses = case.get("response", [])
-
-    assert len(requests) == len(responses), (
-        "Expected requests and responses counts to be same"
-    )
-
-    # Create a custom index of all the packages that are supposed to be
-    # available
-    # XXX: This doesn't work because this isn't making an index of files.
-    for package in available:
-        if isinstance(package, str):
-            package = convert_to_dict(package)
-
-        assert isinstance(package, dict), "Needs to be a dictionary"
-
-        create_basic_wheel_for_package(script, **package)
-
-    # use scratch path for index
-    for request, response in zip(requests, responses):
-
-        for action in 'install', 'uninstall':
-            if action in request:
-                break
-        else:
-            raise f"Unsupported request {request!r}"
-
-        # Perform the requested action
-        effect = handle_request(script, action,
-                                request[action],
-                                request.get('options', '').split(),
-                                resolver_variant=case[':resolver:'])
-        result = effect['result']
-
-        if 0:  # for analyzing output easier
-            with open(DATA_DIR.parent / "yaml" /
-                      case[':name:'].replace('*', '-'), 'w') as fo:
-                fo.write("=== RETURNCODE = %d\n" % result.returncode)
-                fo.write("=== STDERR ===:\n%s\n" % result.stderr)
-
-        if 'state' in response:
-            assert effect['state'] == (response['state'] or []), str(result)
-
-        error = response.get('error')
-        if error and case[":resolver:"] == 'new' and sys.platform != 'win32':
-            # Note: we currently skip running these tests on Windows, as they
-            # were failing due to different error codes.  There should not
-            # be a reason for not running these this check on Windows.
-            check_error(error, result)
diff --git a/tests/lib/__init__.py b/tests/lib/__init__.py
index 059de3ba1..cbb7b7b6d 100644
--- a/tests/lib/__init__.py
+++ b/tests/lib/__init__.py
@@ -31,7 +31,6 @@ DATA_DIR = Path(__file__).parent.parent.joinpath("data").resolve()
 SRC_DIR = Path(__file__).resolve().parent.parent.parent
 
 pyversion = get_major_minor_version()
-pyversion_tuple = sys.version_info
 
 CURRENT_PY_VERSION_INFO = sys.version_info[:3]
 
@@ -49,12 +48,12 @@ def path_to_url(path):
     path = os.path.normpath(os.path.abspath(path))
     drive, path = os.path.splitdrive(path)
     filepath = path.split(os.path.sep)
-    url = '/'.join(filepath)
+    url = "/".join(filepath)
     if drive:
         # Note: match urllib.request.pathname2url's
         # behavior: uppercase the drive letter.
-        return 'file:///' + drive.upper() + url
-    return 'file://' + url
+        return "file:///" + drive.upper() + url
+    return "file://" + url
 
 
 def _test_path_to_file_url(path):
@@ -64,12 +63,11 @@ def _test_path_to_file_url(path):
     Args:
       path: a tests.lib.path.Path object.
     """
-    return 'file://' + path.resolve().replace('\\', '/')
+    return "file://" + path.resolve().replace("\\", "/")
 
 
 def create_file(path, contents=None):
-    """Create a file on the path, with the given contents
-    """
+    """Create a file on the path, with the given contents"""
     from pip._internal.utils.misc import ensure_dir
 
     ensure_dir(os.path.dirname(path))
@@ -95,7 +93,7 @@ def make_test_search_scope(
 def make_test_link_collector(
     find_links=None,  # type: Optional[List[str]]
     index_urls=None,  # type: Optional[List[str]]
-    session=None,     # type: Optional[PipSession]
+    session=None,  # type: Optional[PipSession]
 ):
     # type: (...) -> LinkCollector
     """
@@ -116,8 +114,8 @@ def make_test_finder(
     find_links=None,  # type: Optional[List[str]]
     index_urls=None,  # type: Optional[List[str]]
     allow_all_prereleases=False,  # type: bool
-    session=None,                 # type: Optional[PipSession]
-    target_python=None,           # type: Optional[TargetPython]
+    session=None,  # type: Optional[PipSession]
+    target_python=None,  # type: Optional[TargetPython]
 ):
     # type: (...) -> PackageFinder
     """
@@ -221,51 +219,59 @@ class TestFailure(AssertionError):
     """
     An "assertion" failed during testing.
     """
+
     pass
 
 
 class TestPipResult:
-
     def __init__(self, impl, verbose=False):
         self._impl = impl
 
         if verbose:
             print(self.stdout)
             if self.stderr:
-                print('======= stderr ========')
+                print("======= stderr ========")
                 print(self.stderr)
-                print('=======================')
+                print("=======================")
 
     def __getattr__(self, attr):
         return getattr(self._impl, attr)
 
-    if sys.platform == 'win32':
+    if sys.platform == "win32":
 
         @property
         def stdout(self):
-            return self._impl.stdout.replace('\r\n', '\n')
+            return self._impl.stdout.replace("\r\n", "\n")
 
         @property
         def stderr(self):
-            return self._impl.stderr.replace('\r\n', '\n')
+            return self._impl.stderr.replace("\r\n", "\n")
 
         def __str__(self):
-            return str(self._impl).replace('\r\n', '\n')
+            return str(self._impl).replace("\r\n", "\n")
+
     else:
         # Python doesn't automatically forward __str__ through __getattr__
 
         def __str__(self):
             return str(self._impl)
 
-    def assert_installed(self, pkg_name, editable=True, with_files=None,
-                         without_files=None, without_egg_link=False,
-                         use_user_site=False, sub_dir=False):
+    def assert_installed(
+        self,
+        pkg_name,
+        editable=True,
+        with_files=None,
+        without_files=None,
+        without_egg_link=False,
+        use_user_site=False,
+        sub_dir=False,
+    ):
         with_files = with_files or []
         without_files = without_files or []
         e = self.test_env
 
         if editable:
-            pkg_dir = e.venv / 'src' / pkg_name.lower()
+            pkg_dir = e.venv / "src" / pkg_name.lower()
             # If package was installed in a sub directory
             if sub_dir:
                 pkg_dir = pkg_dir / sub_dir
@@ -274,78 +280,76 @@ class TestPipResult:
             pkg_dir = e.site_packages / pkg_name
 
         if use_user_site:
-            egg_link_path = e.user_site / pkg_name + '.egg-link'
+            egg_link_path = e.user_site / pkg_name + ".egg-link"
         else:
-            egg_link_path = e.site_packages / pkg_name + '.egg-link'
+            egg_link_path = e.site_packages / pkg_name + ".egg-link"
 
         if without_egg_link:
             if egg_link_path in self.files_created:
                 raise TestFailure(
-                    'unexpected egg link file created: '
-                    '{egg_link_path!r}\n{self}'
-                    .format(**locals())
+                    "unexpected egg link file created: " f"{egg_link_path!r}\n{self}"
                 )
         else:
             if egg_link_path not in self.files_created:
                 raise TestFailure(
-                    'expected egg link file missing: '
-                    '{egg_link_path!r}\n{self}'
-                    .format(**locals())
+                    "expected egg link file missing: " f"{egg_link_path!r}\n{self}"
                 )
 
             egg_link_file = self.files_created[egg_link_path]
-            egg_link_contents = egg_link_file.bytes.replace(os.linesep, '\n')
+            egg_link_contents = egg_link_file.bytes.replace(os.linesep, "\n")
 
             # FIXME: I don't understand why there's a trailing . here
-            if not (egg_link_contents.endswith('\n.') and
-                    egg_link_contents[:-2].endswith(pkg_dir)):
-                raise TestFailure(textwrap.dedent(
-                    '''\
-                    Incorrect egg_link file {egg_link_file!r}
-                    Expected ending: {expected_ending!r}
-                    ------- Actual contents -------
-                    {egg_link_contents!r}
-                    -------------------------------'''.format(
-                        expected_ending=pkg_dir + '\n.',
-                        **locals())
-                ))
+            if not (
+                egg_link_contents.endswith("\n.")
+                and egg_link_contents[:-2].endswith(pkg_dir)
+            ):
+                expected_ending = pkg_dir + "\n."
+                raise TestFailure(
+                    textwrap.dedent(
+                        f"""
+                        Incorrect egg_link file {egg_link_file!r}
+                        Expected ending: {expected_ending!r}
+                        ------- Actual contents -------
+                        {egg_link_contents!r}
+                        -------------------------------
+                        """
+                    ).strip()
+                )
 
         if use_user_site:
-            pth_file = e.user_site / 'easy-install.pth'
+            pth_file = e.user_site / "easy-install.pth"
         else:
-            pth_file = e.site_packages / 'easy-install.pth'
+            pth_file = e.site_packages / "easy-install.pth"
 
         if (pth_file in self.files_updated) == without_egg_link:
-            raise TestFailure(
-                '{pth_file} unexpectedly {maybe}updated by install'.format(
-                    maybe=not without_egg_link and 'not ' or '',
-                    **locals()))
+            maybe = "" if without_egg_link else "not "
+            raise TestFailure(f"{pth_file} unexpectedly {maybe}updated by install")
 
         if (pkg_dir in self.files_created) == (curdir in without_files):
-            raise TestFailure(textwrap.dedent('''\
-            expected package directory {pkg_dir!r} {maybe}to be created
-            actually created:
-            {files}
-            ''').format(
-                pkg_dir=pkg_dir,
-                maybe=curdir in without_files and 'not ' or '',
-                files=sorted(self.files_created.keys()),
-            ))
+            maybe = "not " if curdir in without_files else ""
+            files = sorted(self.files_created)
+            raise TestFailure(
+                textwrap.dedent(
+                    f"""
+                    expected package directory {pkg_dir!r} {maybe}to be created
+                    actually created:
+                    {files}
+                    """
+                )
+            )
 
         for f in with_files:
             normalized_path = os.path.normpath(pkg_dir / f)
             if normalized_path not in self.files_created:
                 raise TestFailure(
-                    'Package directory {pkg_dir!r} missing '
-                    'expected content {f!r}'.format(**locals())
+                    f"Package directory {pkg_dir!r} missing " f"expected content {f!r}"
                 )
 
         for f in without_files:
             normalized_path = os.path.normpath(pkg_dir / f)
             if normalized_path in self.files_created:
                 raise TestFailure(
-                    'Package directory {pkg_dir!r} has unexpected content {f}'
-                    .format(**locals())
+                    f"Package directory {pkg_dir!r} has unexpected content {f}"
                 )
 
     def did_create(self, path, message=None):
@@ -362,8 +366,7 @@ class TestPipResult:
 
 
 def _one_or_both(a, b):
-    """Returns f"{a}\n{b}" if a is truthy, else returns str(b).
-    """
+    """Returns f"{a}\n{b}" if a is truthy, else returns str(b)."""
     if not a:
         return str(b)
 
@@ -374,15 +377,19 @@ def make_check_stderr_message(stderr, line, reason):
     """
     Create an exception message to use inside check_stderr().
     """
-    return dedent("""\
+    return dedent(
+        """\
     {reason}:
      Caused by line: {line!r}
      Complete stderr: {stderr}
-    """).format(stderr=stderr, line=line, reason=reason)
+    """
+    ).format(stderr=stderr, line=line, reason=reason)
 
 
 def _check_stderr(
-    stderr, allow_stderr_warning, allow_stderr_error,
+    stderr,
+    allow_stderr_warning,
+    allow_stderr_error,
 ):
     """
     Check the given stderr for logged warnings and errors.
@@ -403,29 +410,29 @@ def _check_stderr(
         # sent directly to stderr and so bypass any configured log formatter.
         # The "--- Logging error ---" string is used in Python 3.4+, and
         # "Logged from file " is used in Python 2.
-        if (line.startswith('--- Logging error ---') or
-                line.startswith('Logged from file ')):
-            reason = 'stderr has a logging error, which is never allowed'
+        if line.startswith("--- Logging error ---") or line.startswith(
+            "Logged from file "
+        ):
+            reason = "stderr has a logging error, which is never allowed"
             msg = make_check_stderr_message(stderr, line=line, reason=reason)
             raise RuntimeError(msg)
         if allow_stderr_error:
             continue
 
-        if line.startswith('ERROR: '):
+        if line.startswith("ERROR: "):
             reason = (
-                'stderr has an unexpected error '
-                '(pass allow_stderr_error=True to permit this)'
+                "stderr has an unexpected error "
+                "(pass allow_stderr_error=True to permit this)"
             )
             msg = make_check_stderr_message(stderr, line=line, reason=reason)
             raise RuntimeError(msg)
         if allow_stderr_warning:
             continue
 
-        if (line.startswith('WARNING: ') or
-                line.startswith(DEPRECATION_MSG_PREFIX)):
+        if line.startswith("WARNING: ") or line.startswith(DEPRECATION_MSG_PREFIX):
             reason = (
-                'stderr has an unexpected warning '
-                '(pass allow_stderr_warning=True to permit this)'
+                "stderr has an unexpected warning "
+                "(pass allow_stderr_warning=True to permit this)"
             )
             msg = make_check_stderr_message(stderr, line=line, reason=reason)
             raise RuntimeError(msg)
@@ -445,7 +452,7 @@ class PipTestEnvironment(TestFileEnvironment):
     # a name of the form xxxx_path and relative paths have a name that
     # does not end in '_path'.
 
-    exe = sys.platform == 'win32' and '.exe' or ''
+    exe = sys.platform == "win32" and ".exe" or ""
     verbose = False
 
     def __init__(self, base_path, *args, virtualenv, pip_expect_warning=None, **kwargs):
@@ -461,16 +468,16 @@ class PipTestEnvironment(TestFileEnvironment):
         self.user_base_path = self.venv_path.joinpath("user")
         self.user_site_path = self.venv_path.joinpath(
             "user",
-            site.USER_SITE[len(site.USER_BASE) + 1:],
+            site.USER_SITE[len(site.USER_BASE) + 1 :],
         )
-        if sys.platform == 'win32':
+        if sys.platform == "win32":
             if sys.version_info >= (3, 5):
                 scripts_base = Path(
-                    os.path.normpath(self.user_site_path.joinpath('..'))
+                    os.path.normpath(self.user_site_path.joinpath(".."))
                 )
             else:
                 scripts_base = self.user_base_path
-            self.user_bin_path = scripts_base.joinpath('Scripts')
+            self.user_bin_path = scripts_base.joinpath("Scripts")
         else:
             self.user_bin_path = self.user_base_path.joinpath(
                 os.path.relpath(self.bin_path, self.venv_path)
@@ -502,12 +509,21 @@ class PipTestEnvironment(TestFileEnvironment):
         super().__init__(base_path, *args, **kwargs)
 
         # Expand our absolute path directories into relative
-        for name in ["base", "venv", "bin", "lib", "site_packages",
-                     "user_base", "user_site", "user_bin", "scratch"]:
-            real_name = "{name}_path".format(**locals())
-            relative_path = Path(os.path.relpath(
-                getattr(self, real_name), self.base_path
-            ))
+        for name in [
+            "base",
+            "venv",
+            "bin",
+            "lib",
+            "site_packages",
+            "user_base",
+            "user_site",
+            "user_bin",
+            "scratch",
+        ]:
+            real_name = f"{name}_path"
+            relative_path = Path(
+                os.path.relpath(getattr(self, real_name), self.base_path)
+            )
             setattr(self, name, relative_path)
 
         # Make sure temp_path is a Path object
@@ -521,7 +537,7 @@ class PipTestEnvironment(TestFileEnvironment):
         self.user_site_path.joinpath("easy-install.pth").touch()
 
     def _ignore_file(self, fn):
-        if fn.endswith('__pycache__') or fn.endswith(".pyc"):
+        if fn.endswith("__pycache__") or fn.endswith(".pyc"):
             result = True
         else:
             result = super()._ignore_file(fn)
@@ -532,7 +548,7 @@ class PipTestEnvironment(TestFileEnvironment):
         # results because of venv `lib64 -> lib/` symlink on Linux.
         full = os.path.join(self.base_path, path)
         if os.path.isdir(full) and os.path.islink(full):
-            if not self.temp_path or path != 'tmp':
+            if not self.temp_path or path != "tmp":
                 result[path] = FoundDir(self.base_path, path)
         else:
             super()._find_traverse(path, result)
@@ -567,42 +583,40 @@ class PipTestEnvironment(TestFileEnvironment):
             compatibility.
         """
         if self.verbose:
-            print('>> running {args} {kw}'.format(**locals()))
+            print(f">> running {args} {kw}")
 
         assert not cwd or not run_from, "Don't use run_from; it's going away"
         cwd = cwd or run_from or self.cwd
-        if sys.platform == 'win32':
+        if sys.platform == "win32":
             # Partial fix for ScriptTest.run using `shell=True` on Windows.
-            args = [str(a).replace('^', '^^').replace('&', '^&') for a in args]
+            args = [str(a).replace("^", "^^").replace("&", "^&") for a in args]
 
         if allow_error:
-            kw['expect_error'] = True
+            kw["expect_error"] = True
 
         # Propagate default values.
-        expect_error = kw.get('expect_error')
+        expect_error = kw.get("expect_error")
         if expect_error:
             # Then default to allowing logged errors.
             if allow_stderr_error is not None and not allow_stderr_error:
                 raise RuntimeError(
-                    'cannot pass allow_stderr_error=False with '
-                    'expect_error=True'
+                    "cannot pass allow_stderr_error=False with " "expect_error=True"
                 )
             allow_stderr_error = True
 
-        elif kw.get('expect_stderr'):
+        elif kw.get("expect_stderr"):
             # Then default to allowing logged warnings.
             if allow_stderr_warning is not None and not allow_stderr_warning:
                 raise RuntimeError(
-                    'cannot pass allow_stderr_warning=False with '
-                    'expect_stderr=True'
+                    "cannot pass allow_stderr_warning=False with " "expect_stderr=True"
                 )
             allow_stderr_warning = True
 
         if allow_stderr_error:
             if allow_stderr_warning is not None and not allow_stderr_warning:
                 raise RuntimeError(
-                    'cannot pass allow_stderr_warning=False with '
-                    'allow_stderr_error=True'
+                    "cannot pass allow_stderr_warning=False with "
+                    "allow_stderr_error=True"
                 )
 
         # Default values if not set.
@@ -613,7 +627,7 @@ class PipTestEnvironment(TestFileEnvironment):
 
         # Pass expect_stderr=True to allow any stderr.  We do this because
         # we do our checking of stderr further on in check_stderr().
-        kw['expect_stderr'] = True
+        kw["expect_stderr"] = True
         result = super().run(cwd=cwd, *args, **kw)
 
         if expect_error and not allow_error:
@@ -622,7 +636,8 @@ class PipTestEnvironment(TestFileEnvironment):
                 raise AssertionError("Script passed unexpectedly.")
 
         _check_stderr(
-            result.stderr, allow_stderr_error=allow_stderr_error,
+            result.stderr,
+            allow_stderr_error=allow_stderr_error,
             allow_stderr_warning=allow_stderr_warning,
         )
 
@@ -631,24 +646,27 @@ class PipTestEnvironment(TestFileEnvironment):
     def pip(self, *args, use_module=True, **kwargs):
         __tracebackhide__ = True
         if self.pip_expect_warning:
-            kwargs['allow_stderr_warning'] = True
+            kwargs["allow_stderr_warning"] = True
         if use_module:
-            exe = 'python'
-            args = ('-m', 'pip') + args
+            exe = "python"
+            args = ("-m", "pip") + args
         else:
-            exe = 'pip'
+            exe = "pip"
         return self.run(exe, *args, **kwargs)
 
     def pip_install_local(self, *args, **kwargs):
         return self.pip(
-            "install", "--no-index",
-            "--find-links", path_to_url(os.path.join(DATA_DIR, "packages")),
-            *args, **kwargs
+            "install",
+            "--no-index",
+            "--find-links",
+            path_to_url(os.path.join(DATA_DIR, "packages")),
+            *args,
+            **kwargs,
         )
 
     def easy_install(self, *args, **kwargs):
-        args = ('-m', 'easy_install') + args
-        return self.run('python', *args, **kwargs)
+        args = ("-m", "easy_install") + args
+        return self.run("python", *args, **kwargs)
 
 
 # FIXME ScriptTest does something similar, but only within a single
@@ -686,15 +704,15 @@ def diff_states(start, end, ignore=None):
         prefix = prefix.rstrip(os.path.sep) + os.path.sep
         return path.startswith(prefix)
 
-    start_keys = {k for k in start.keys()
-                  if not any([prefix_match(k, i) for i in ignore])}
-    end_keys = {k for k in end.keys()
-                if not any([prefix_match(k, i) for i in ignore])}
+    start_keys = {
+        k for k in start.keys() if not any([prefix_match(k, i) for i in ignore])
+    }
+    end_keys = {k for k in end.keys() if not any([prefix_match(k, i) for i in ignore])}
     deleted = {k: start[k] for k in start_keys.difference(end_keys)}
     created = {k: end[k] for k in end_keys.difference(start_keys)}
     updated = {}
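+    # A file counts as updated when its size changed between the two snapshots.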
     for k in start_keys.intersection(end_keys):
-        if (start[k].size != end[k].size):
+        if start[k].size != end[k].size:
             updated[k] = end[k]
     return dict(deleted=deleted, created=created, updated=updated)
 
@@ -723,8 +741,10 @@ def assert_all_changes(start_state, end_state, expected_changes):
 
     diff = diff_states(start_files, end_files, ignore=expected_changes)
     if list(diff.values()) != [{}, {}, {}]:
-        raise TestFailure('Unexpected changes:\n' + '\n'.join(
-            [k + ': ' + ', '.join(v.keys()) for k, v in diff.items()]))
+        raise TestFailure(
+            "Unexpected changes:\n"
+            + "\n".join([k + ": " + ", ".join(v.keys()) for k, v in diff.items()])
+        )
 
     # Don't throw away this potentially useful information
     return diff
@@ -735,14 +755,16 @@ def _create_main_file(dir_path, name=None, output=None):
     Create a module with a main() function that prints the given output.
     """
     if name is None:
-        name = 'version_pkg'
+        name = "version_pkg"
     if output is None:
-        output = '0.1'
-    text = textwrap.dedent("""\
-    def main():
-        print({!r})
-    """.format(output))
-    filename = f'{name}.py'
+        output = "0.1"
+    text = textwrap.dedent(
+        f"""
+        def main():
+            print({output!r})
+        """
+    )
+    filename = f"{name}.py"
     dir_path.joinpath(filename).write_text(text)
 
 
@@ -762,7 +784,7 @@ def _git_commit(
       message: an optional commit message.
     """
     if message is None:
-        message = 'test commit'
+        message = "test commit"
 
     args = []
 
@@ -773,151 +795,186 @@ def _git_commit(
         args.append("--all")
 
     new_args = [
-        'git', 'commit', '-q', '--author', 'pip ',
+        "git",
+        "commit",
+        "-q",
+        "--author",
+        "pip ",
     ]
     new_args.extend(args)
-    new_args.extend(['-m', message])
+    new_args.extend(["-m", message])
     env_or_script.run(*new_args, cwd=repo_dir)
 
 
-def _vcs_add(script, version_pkg_path, vcs='git'):
-    if vcs == 'git':
-        script.run('git', 'init', cwd=version_pkg_path)
-        script.run('git', 'add', '.', cwd=version_pkg_path)
-        _git_commit(script, version_pkg_path, message='initial version')
-    elif vcs == 'hg':
-        script.run('hg', 'init', cwd=version_pkg_path)
-        script.run('hg', 'add', '.', cwd=version_pkg_path)
+def _vcs_add(script, version_pkg_path, vcs="git"):
+    if vcs == "git":
+        script.run("git", "init", cwd=version_pkg_path)
+        script.run("git", "add", ".", cwd=version_pkg_path)
+        _git_commit(script, version_pkg_path, message="initial version")
+    elif vcs == "hg":
+        script.run("hg", "init", cwd=version_pkg_path)
+        script.run("hg", "add", ".", cwd=version_pkg_path)
         script.run(
-            'hg', 'commit', '-q',
-            '--user', 'pip ',
-            '-m', 'initial version', cwd=version_pkg_path,
+            "hg",
+            "commit",
+            "-q",
+            "--user",
+            "pip ",
+            "-m",
+            "initial version",
+            cwd=version_pkg_path,
         )
-    elif vcs == 'svn':
+    elif vcs == "svn":
         repo_url = _create_svn_repo(script, version_pkg_path)
         script.run(
-            'svn', 'checkout', repo_url, 'pip-test-package',
-            cwd=script.scratch_path
+            "svn", "checkout", repo_url, "pip-test-package", cwd=script.scratch_path
         )
-        checkout_path = script.scratch_path / 'pip-test-package'
+        checkout_path = script.scratch_path / "pip-test-package"
 
         # svn internally stores windows drives as uppercase; we'll match that.
-        checkout_path = checkout_path.replace('c:', 'C:')
+        checkout_path = checkout_path.replace("c:", "C:")
 
         version_pkg_path = checkout_path
-    elif vcs == 'bazaar':
-        script.run('bzr', 'init', cwd=version_pkg_path)
-        script.run('bzr', 'add', '.', cwd=version_pkg_path)
+    elif vcs == "bazaar":
+        script.run("bzr", "init", cwd=version_pkg_path)
+        script.run("bzr", "add", ".", cwd=version_pkg_path)
         script.run(
-            'bzr', 'whoami', 'pip ',
-            cwd=version_pkg_path)
+            "bzr", "whoami", "pip ", cwd=version_pkg_path
+        )
         script.run(
-            'bzr', 'commit', '-q',
-            '--author', 'pip ',
-            '-m', 'initial version', cwd=version_pkg_path,
+            "bzr",
+            "commit",
+            "-q",
+            "--author",
+            "pip ",
+            "-m",
+            "initial version",
+            cwd=version_pkg_path,
         )
     else:
-        raise ValueError('Unknown vcs: {vcs}'.format(**locals()))
+        raise ValueError(f"Unknown vcs: {vcs}")
     return version_pkg_path
 
 
 def _create_test_package_with_subdirectory(script, subdirectory):
     script.scratch_path.joinpath("version_pkg").mkdir()
-    version_pkg_path = script.scratch_path / 'version_pkg'
+    version_pkg_path = script.scratch_path / "version_pkg"
     _create_main_file(version_pkg_path, name="version_pkg", output="0.1")
     version_pkg_path.joinpath("setup.py").write_text(
-        textwrap.dedent("""
-    from setuptools import setup, find_packages
-    setup(name='version_pkg',
-          version='0.1',
-          packages=find_packages(),
-          py_modules=['version_pkg'],
-          entry_points=dict(console_scripts=['version_pkg=version_pkg:main']))
-        """))
+        textwrap.dedent(
+            """
+            from setuptools import setup, find_packages
+
+            setup(
+                name="version_pkg",
+                version="0.1",
+                packages=find_packages(),
+                py_modules=["version_pkg"],
+                entry_points=dict(console_scripts=["version_pkg=version_pkg:main"]),
+            )
+            """
+        )
+    )
 
     subdirectory_path = version_pkg_path.joinpath(subdirectory)
     subdirectory_path.mkdir()
     _create_main_file(subdirectory_path, name="version_subpkg", output="0.1")
 
-    subdirectory_path.joinpath('setup.py').write_text(
-        textwrap.dedent("""
-from setuptools import setup, find_packages
-setup(name='version_subpkg',
-      version='0.1',
-      packages=find_packages(),
-      py_modules=['version_subpkg'],
-      entry_points=dict(console_scripts=['version_pkg=version_subpkg:main']))
-        """))
+    subdirectory_path.joinpath("setup.py").write_text(
+        textwrap.dedent(
+            """
+            from setuptools import find_packages, setup
 
-    script.run('git', 'init', cwd=version_pkg_path)
-    script.run('git', 'add', '.', cwd=version_pkg_path)
-    _git_commit(script, version_pkg_path, message='initial version')
+            setup(
+                name="version_subpkg",
+                version="0.1",
+                packages=find_packages(),
+                py_modules=["version_subpkg"],
+                entry_points=dict(console_scripts=["version_pkg=version_subpkg:main"]),
+            )
+            """
+        )
+    )
+
+    script.run("git", "init", cwd=version_pkg_path)
+    script.run("git", "add", ".", cwd=version_pkg_path)
+    _git_commit(script, version_pkg_path, message="initial version")
 
     return version_pkg_path
 
 
-def _create_test_package_with_srcdir(script, name='version_pkg', vcs='git'):
+def _create_test_package_with_srcdir(script, name="version_pkg", vcs="git"):
     script.scratch_path.joinpath(name).mkdir()
     version_pkg_path = script.scratch_path / name
-    subdir_path = version_pkg_path.joinpath('subdir')
+    subdir_path = version_pkg_path.joinpath("subdir")
     subdir_path.mkdir()
-    src_path = subdir_path.joinpath('src')
+    src_path = subdir_path.joinpath("src")
     src_path.mkdir()
-    pkg_path = src_path.joinpath('pkg')
+    pkg_path = src_path.joinpath("pkg")
     pkg_path.mkdir()
-    pkg_path.joinpath('__init__.py').write_text('')
-    subdir_path.joinpath("setup.py").write_text(textwrap.dedent("""
-        from setuptools import setup, find_packages
-        setup(
-            name='{name}',
-            version='0.1',
-            packages=find_packages(),
-            package_dir={{'': 'src'}},
+    pkg_path.joinpath("__init__.py").write_text("")
+    subdir_path.joinpath("setup.py").write_text(
+        textwrap.dedent(
+            """
+                from setuptools import setup, find_packages
+                setup(
+                    name="{name}",
+                    version="0.1",
+                    packages=find_packages(),
+                    package_dir={{"": "src"}},
+                )
+            """.format(
+                name=name
+            )
         )
-    """.format(name=name)))
+    )
     return _vcs_add(script, version_pkg_path, vcs)
 
 
-def _create_test_package(script, name='version_pkg', vcs='git'):
+def _create_test_package(script, name="version_pkg", vcs="git"):
     script.scratch_path.joinpath(name).mkdir()
     version_pkg_path = script.scratch_path / name
-    _create_main_file(version_pkg_path, name=name, output='0.1')
-    version_pkg_path.joinpath("setup.py").write_text(textwrap.dedent("""
-        from setuptools import setup, find_packages
-        setup(
-            name='{name}',
-            version='0.1',
-            packages=find_packages(),
-            py_modules=['{name}'],
-            entry_points=dict(console_scripts=['{name}={name}:main'])
+    _create_main_file(version_pkg_path, name=name, output="0.1")
+    version_pkg_path.joinpath("setup.py").write_text(
+        textwrap.dedent(
+            """
+                from setuptools import setup, find_packages
+                setup(
+                    name="{name}",
+                    version="0.1",
+                    packages=find_packages(),
+                    py_modules=["{name}"],
+                    entry_points=dict(console_scripts=["{name}={name}:main"]),
+                )
+            """.format(
+                name=name
+            )
         )
-    """.format(name=name)))
+    )
     return _vcs_add(script, version_pkg_path, vcs)
 
 
 def _create_svn_repo(script, version_pkg_path):
-    repo_url = path_to_url(
-        script.scratch_path / 'pip-test-package-repo' / 'trunk')
+    repo_url = path_to_url(script.scratch_path / "pip-test-package-repo" / "trunk")
+    script.run("svnadmin", "create", "pip-test-package-repo", cwd=script.scratch_path)
     script.run(
-        'svnadmin', 'create', 'pip-test-package-repo',
-        cwd=script.scratch_path
-    )
-    script.run(
-        'svn', 'import', version_pkg_path, repo_url,
-        '-m', 'Initial import of pip-test-package',
-        cwd=script.scratch_path
+        "svn",
+        "import",
+        version_pkg_path,
+        repo_url,
+        "-m",
+        "Initial import of pip-test-package",
+        cwd=script.scratch_path,
     )
     return repo_url
 
 
 def _change_test_package_version(script, version_pkg_path):
     _create_main_file(
-        version_pkg_path, name='version_pkg', output='some different version'
+        version_pkg_path, name="version_pkg", output="some different version"
     )
     # Pass -a to stage the change to the main file.
-    _git_commit(
-        script, version_pkg_path, message='messed version', stage_modified=True
-    )
+    _git_commit(script, version_pkg_path, message="messed version", stage_modified=True)
 
 
 def assert_raises_regexp(exception, reg, run, *args, **kwargs):
@@ -926,7 +983,7 @@ def assert_raises_regexp(exception, reg, run, *args, **kwargs):
 
     try:
         run(*args, **kwargs)
-        assert False, "{exception} should have been thrown".format(**locals())
+        assert False, f"{exception} should have been thrown"
     except exception:
         e = sys.exc_info()[1]
         p = re.compile(reg)
@@ -942,21 +999,25 @@ def requirements_file(contents, tmpdir):
     :param tmpdir: A Path to the folder in which to create the file
 
     """
-    path = tmpdir / 'reqs.txt'
+    path = tmpdir / "reqs.txt"
     path.write_text(contents)
     yield path
     path.unlink()
 
 
 def create_test_package_with_setup(script, **setup_kwargs):
-    assert 'name' in setup_kwargs, setup_kwargs
-    pkg_path = script.scratch_path / setup_kwargs['name']
+    assert "name" in setup_kwargs, setup_kwargs
+    pkg_path = script.scratch_path / setup_kwargs["name"]
     pkg_path.mkdir()
-    pkg_path.joinpath("setup.py").write_text(textwrap.dedent("""
-        from setuptools import setup
-        kwargs = {setup_kwargs!r}
-        setup(**kwargs)
-    """).format(**locals()))
+    pkg_path.joinpath("setup.py").write_text(
+        textwrap.dedent(
+            f"""
+                from setuptools import setup
+                kwargs = {setup_kwargs!r}
+                setup(**kwargs)
+            """
+        )
+    )
     return pkg_path
 
 
@@ -968,9 +1029,7 @@ def urlsafe_b64encode_nopad(data):
 def create_really_basic_wheel(name, version):
     # type: (str, str) -> bytes
     def digest(contents):
-        return "sha256={}".format(
-            urlsafe_b64encode_nopad(sha256(contents).digest())
-        )
+        return "sha256={}".format(urlsafe_b64encode_nopad(sha256(contents).digest()))
 
     def add_file(path, text):
         contents = text.encode("utf-8")
@@ -990,7 +1049,9 @@ def create_really_basic_wheel(name, version):
                 Metadata-Version: 2.1
                 Name: {}
                 Version: {}
-                """.format(name, version)
+                """.format(
+                    name, version
+                )
             ),
         )
         z.writestr(record_path, "\n".join(",".join(r) for r in records))
@@ -1050,7 +1111,6 @@ def create_basic_wheel_for_package(
         metadata_updates=metadata_updates,
         extra_metadata_files={"top_level.txt": name},
         extra_files=extra_files,
-
         # Have an empty RECORD because we don't want to be checking hashes.
         record="",
     )
@@ -1059,9 +1119,7 @@ def create_basic_wheel_for_package(
     return archive_path
 
 
-def create_basic_sdist_for_package(
-    script, name, version, extra_files=None
-):
+def create_basic_sdist_for_package(script, name, version, extra_files=None):
     files = {
         "setup.py": """
             from setuptools import find_packages, setup
@@ -1070,17 +1128,13 @@ def create_basic_sdist_for_package(
     }
 
     # Some useful shorthands
-    archive_name = "{name}-{version}.tar.gz".format(
-        name=name, version=version
-    )
+    archive_name = "{name}-{version}.tar.gz".format(name=name, version=version)
 
     # Replace key-values with formatted values
     for key, value in list(files.items()):
         del files[key]
         key = key.format(name=name)
-        files[key] = textwrap.dedent(value).format(
-            name=name, version=version
-        ).strip()
+        files[key] = textwrap.dedent(value).format(name=name, version=version).strip()
 
     # Add new files after formatting
     if extra_files:
@@ -1094,7 +1148,7 @@ def create_basic_sdist_for_package(
     retval = script.scratch_path / archive_name
     generated = shutil.make_archive(
         retval,
-        'gztar',
+        "gztar",
         root_dir=script.temp_path,
         base_dir=os.curdir,
     )
@@ -1111,15 +1165,15 @@ def need_executable(name, check_cmd):
         try:
             subprocess.check_output(check_cmd)
         except (OSError, subprocess.CalledProcessError):
-            return pytest.mark.skip(
-                reason=f'{name} is not available')(fn)
+            return pytest.mark.skip(reason=f"{name} is not available")(fn)
         return fn
+
     return wrapper
 
 
 def is_bzr_installed():
     try:
-        subprocess.check_output(('bzr', 'version', '--short'))
+        subprocess.check_output(("bzr", "version", "--short"))
     except OSError:
         return False
     return True
@@ -1127,34 +1181,23 @@ def is_bzr_installed():
 
 def is_svn_installed():
     try:
-        subprocess.check_output(('svn', '--version'))
+        subprocess.check_output(("svn", "--version"))
     except OSError:
         return False
     return True
 
 
 def need_bzr(fn):
-    return pytest.mark.bzr(need_executable(
-        'Bazaar', ('bzr', 'version', '--short')
-    )(fn))
+    return pytest.mark.bzr(need_executable("Bazaar", ("bzr", "version", "--short"))(fn))
 
 
 def need_svn(fn):
-    return pytest.mark.svn(need_executable(
-        'Subversion', ('svn', '--version')
-    )(need_executable(
-        'Subversion Admin', ('svnadmin', '--version')
-    )(fn)))
+    return pytest.mark.svn(
+        need_executable("Subversion", ("svn", "--version"))(
+            need_executable("Subversion Admin", ("svnadmin", "--version"))(fn)
+        )
+    )
 
 
 def need_mercurial(fn):
-    return pytest.mark.mercurial(need_executable(
-        'Mercurial', ('hg', 'version')
-    )(fn))
-
-
-# Workaround for test failures after new wheel release.
-windows_workaround_7667 = pytest.mark.skipif(
-    "sys.platform == 'win32' and sys.version_info < (3,)",
-    reason="Workaround for #7667",
-)
+    return pytest.mark.mercurial(need_executable("Mercurial", ("hg", "version"))(fn))
diff --git a/tests/lib/certs.py b/tests/lib/certs.py
index b3a9b8e10..6d69395b2 100644
--- a/tests/lib/certs.py
+++ b/tests/lib/certs.py
@@ -11,13 +11,13 @@ from cryptography.x509.oid import NameOID
 def make_tls_cert(hostname):
     # type: (str) -> Tuple[x509.Certificate, rsa.RSAPrivateKey]
     key = rsa.generate_private_key(
-        public_exponent=65537,
-        key_size=2048,
-        backend=default_backend()
+        public_exponent=65537, key_size=2048, backend=default_backend()
+    )
+    subject = issuer = x509.Name(
+        [
+            x509.NameAttribute(NameOID.COMMON_NAME, hostname),
+        ]
     )
-    subject = issuer = x509.Name([
-        x509.NameAttribute(NameOID.COMMON_NAME, hostname),
-    ])
     cert = (
         x509.CertificateBuilder()
         .subject_name(subject)
diff --git a/tests/lib/configuration_helpers.py b/tests/lib/configuration_helpers.py
index 384a424e2..5b20aafa1 100644
--- a/tests/lib/configuration_helpers.py
+++ b/tests/lib/configuration_helpers.py
@@ -15,7 +15,6 @@ kinds = pip._internal.configuration.kinds
 
 
 class ConfigurationMixin:
-
     def setup(self):
         self.configuration = pip._internal.configuration.Configuration(
             isolated=False,
@@ -41,9 +40,7 @@ class ConfigurationMixin:
     @contextlib.contextmanager
     def tmpfile(self, contents):
         # Create a temporary file
-        fd, path = tempfile.mkstemp(
-            prefix="pip_", suffix="_config.ini", text=True
-        )
+        fd, path = tempfile.mkstemp(prefix="pip_", suffix="_config.ini", text=True)
         os.close(fd)
 
         contents = textwrap.dedent(contents).lstrip()
diff --git a/tests/lib/filesystem.py b/tests/lib/filesystem.py
index dc14b323e..05e2db62c 100644
--- a/tests/lib/filesystem.py
+++ b/tests/lib/filesystem.py
@@ -43,6 +43,4 @@ def get_filelist(base):
             (join_dirpath(p) for p in filenames),
         )
 
-    return set(chain.from_iterable(
-        join(*dirinfo) for dirinfo in os.walk(base)
-    ))
+    return set(chain.from_iterable(join(*dirinfo) for dirinfo in os.walk(base)))
diff --git a/tests/lib/git_submodule_helpers.py b/tests/lib/git_submodule_helpers.py
index 494d329ca..220a926b5 100644
--- a/tests/lib/git_submodule_helpers.py
+++ b/tests/lib/git_submodule_helpers.py
@@ -5,11 +5,11 @@ from tests.lib import _create_main_file, _git_commit
 
 def _create_test_package_submodule(env):
     env.scratch_path.joinpath("version_pkg_submodule").mkdir()
-    submodule_path = env.scratch_path / 'version_pkg_submodule'
-    env.run('touch', 'testfile', cwd=submodule_path)
-    env.run('git', 'init', cwd=submodule_path)
-    env.run('git', 'add', '.', cwd=submodule_path)
-    _git_commit(env, submodule_path, message='initial version / submodule')
+    submodule_path = env.scratch_path / "version_pkg_submodule"
+    env.run("touch", "testfile", cwd=submodule_path)
+    env.run("git", "init", cwd=submodule_path)
+    env.run("git", "add", ".", cwd=submodule_path)
+    _git_commit(env, submodule_path, message="initial version / submodule")
 
     return submodule_path
 
@@ -17,8 +17,8 @@ def _create_test_package_submodule(env):
 def _change_test_package_submodule(env, submodule_path):
     submodule_path.joinpath("testfile").write_text("this is a changed file")
     submodule_path.joinpath("testfile2").write_text("this is an added file")
-    env.run('git', 'add', '.', cwd=submodule_path)
-    _git_commit(env, submodule_path, message='submodule change')
+    env.run("git", "add", ".", cwd=submodule_path)
+    _git_commit(env, submodule_path, message="submodule change")
 
 
 def _pull_in_submodule_changes_to_module(env, module_path, rel_path):
@@ -27,11 +27,9 @@ def _pull_in_submodule_changes_to_module(env, module_path, rel_path):
       rel_path: the location of the submodule relative to the superproject.
     """
     submodule_path = module_path / rel_path
-    env.run('git', 'pull', '-q', 'origin', 'master', cwd=submodule_path)
+    env.run("git", "pull", "-q", "origin", "master", cwd=submodule_path)
     # Pass -a to stage the submodule changes that were just pulled in.
-    _git_commit(
-        env, module_path, message='submodule change', stage_modified=True
-    )
+    _git_commit(env, module_path, message="submodule change", stage_modified=True)
 
 
 def _create_test_package_with_submodule(env, rel_path):
@@ -40,33 +38,37 @@ def _create_test_package_with_submodule(env, rel_path):
       rel_path: the location of the submodule relative to the superproject.
     """
     env.scratch_path.joinpath("version_pkg").mkdir()
-    version_pkg_path = env.scratch_path / 'version_pkg'
+    version_pkg_path = env.scratch_path / "version_pkg"
     version_pkg_path.joinpath("testpkg").mkdir()
-    pkg_path = version_pkg_path / 'testpkg'
+    pkg_path = version_pkg_path / "testpkg"
 
     pkg_path.joinpath("__init__.py").write_text("# hello there")
     _create_main_file(pkg_path, name="version_pkg", output="0.1")
-    version_pkg_path.joinpath("setup.py").write_text(textwrap.dedent('''\
+    version_pkg_path.joinpath("setup.py").write_text(
+        textwrap.dedent(
+            """\
                         from setuptools import setup, find_packages
                         setup(name='version_pkg',
                               version='0.1',
                               packages=find_packages(),
                              )
-                        '''))
-    env.run('git', 'init', cwd=version_pkg_path)
-    env.run('git', 'add', '.', cwd=version_pkg_path)
-    _git_commit(env, version_pkg_path, message='initial version')
+                        """
+        )
+    )
+    env.run("git", "init", cwd=version_pkg_path)
+    env.run("git", "add", ".", cwd=version_pkg_path)
+    _git_commit(env, version_pkg_path, message="initial version")
 
     submodule_path = _create_test_package_submodule(env)
 
     env.run(
-        'git',
-        'submodule',
-        'add',
+        "git",
+        "submodule",
+        "add",
         submodule_path,
         rel_path,
         cwd=version_pkg_path,
     )
-    _git_commit(env, version_pkg_path, message='initial version w submodule')
+    _git_commit(env, version_pkg_path, message="initial version w submodule")
 
     return version_pkg_path, submodule_path
diff --git a/tests/lib/index.py b/tests/lib/index.py
index e6dc2a58b..dff0ac103 100644
--- a/tests/lib/index.py
+++ b/tests/lib/index.py
@@ -3,12 +3,12 @@ from pip._internal.models.link import Link
 
 
 def make_mock_candidate(version, yanked_reason=None, hex_digest=None):
-    url = f'https://example.com/pkg-{version}.tar.gz'
+    url = f"https://example.com/pkg-{version}.tar.gz"
     if hex_digest is not None:
         assert len(hex_digest) == 64
-        url += f'#sha256={hex_digest}'
+        url += f"#sha256={hex_digest}"
 
     link = Link(url, yanked_reason=yanked_reason)
-    candidate = InstallationCandidate('mypackage', version, link)
+    candidate = InstallationCandidate("mypackage", version, link)
 
     return candidate
diff --git a/tests/lib/local_repos.py b/tests/lib/local_repos.py
index 0aa75787e..c57ab16f8 100644
--- a/tests/lib/local_repos.py
+++ b/tests/lib/local_repos.py
@@ -13,15 +13,15 @@ def _create_svn_initools_repo(initools_dir):
     Create the SVN INITools repo.
     """
     directory = os.path.dirname(initools_dir)
-    subprocess.check_call('svnadmin create INITools'.split(), cwd=directory)
+    subprocess.check_call("svnadmin create INITools".split(), cwd=directory)
 
     filename, _ = urllib.request.urlretrieve(
-        'http://bitbucket.org/hltbra/pip-initools-dump/raw/8b55c908a320/'
-        'INITools_modified.dump'
+        "http://bitbucket.org/hltbra/pip-initools-dump/raw/8b55c908a320/"
+        "INITools_modified.dump"
     )
     with open(filename) as dump:
         subprocess.check_call(
-            ['svnadmin', 'load', initools_dir],
+            ["svnadmin", "load", initools_dir],
             stdin=dump,
             stdout=subprocess.DEVNULL,
         )
@@ -30,7 +30,7 @@ def _create_svn_initools_repo(initools_dir):
 
 def local_checkout(
     remote_repo,  # type: str
-    temp_path,    # type: Path
+    temp_path,  # type: Path
 ):
     # type: (...) -> str
     """
@@ -38,27 +38,27 @@ def local_checkout(
         temp directory Path object unique to each test function invocation,
         created as a sub directory of the base temp directory.
     """
-    assert '+' in remote_repo
-    vcs_name = remote_repo.split('+', 1)[0]
+    assert "+" in remote_repo
+    vcs_name = remote_repo.split("+", 1)[0]
     repository_name = os.path.basename(remote_repo)
 
-    directory = temp_path.joinpath('cache')
+    directory = temp_path.joinpath("cache")
     repo_url_path = os.path.join(directory, repository_name)
     assert not os.path.exists(repo_url_path)
 
     if not os.path.exists(directory):
         os.mkdir(directory)
 
-    if vcs_name == 'svn':
-        assert repository_name == 'INITools'
+    if vcs_name == "svn":
+        assert repository_name == "INITools"
         _create_svn_initools_repo(repo_url_path)
-        repo_url_path = os.path.join(repo_url_path, 'trunk')
+        repo_url_path = os.path.join(repo_url_path, "trunk")
     else:
         vcs_backend = vcs.get_backend(vcs_name)
         vcs_backend.obtain(repo_url_path, url=hide_url(remote_repo))
 
-    return '{}+{}'.format(vcs_name, path_to_url(repo_url_path))
+    return "{}+{}".format(vcs_name, path_to_url(repo_url_path))
 
 
 def local_repo(remote_repo, temp_path):
-    return local_checkout(remote_repo, temp_path).split('+', 1)[1]
+    return local_checkout(remote_repo, temp_path).split("+", 1)[1]
diff --git a/tests/lib/options_helpers.py b/tests/lib/options_helpers.py
index 8cc5e306d..8aa105b96 100644
--- a/tests/lib/options_helpers.py
+++ b/tests/lib/options_helpers.py
@@ -7,7 +7,6 @@ from pip._internal.commands import CommandInfo, commands_dict
 
 
 class FakeCommand(Command):
-
     def main(self, args):
         index_opts = cmdoptions.make_option_group(
             cmdoptions.index_group,
@@ -18,11 +17,12 @@ class FakeCommand(Command):
 
 
 class AddFakeCommandMixin:
-
     def setup(self):
-        commands_dict['fake'] = CommandInfo(
-            'tests.lib.options_helpers', 'FakeCommand', 'fake summary',
+        commands_dict["fake"] = CommandInfo(
+            "tests.lib.options_helpers",
+            "FakeCommand",
+            "fake summary",
         )
 
     def teardown(self):
-        commands_dict.pop('fake')
+        commands_dict.pop("fake")
diff --git a/tests/lib/path.py b/tests/lib/path.py
index a9dc29ad7..77d78cce5 100644
--- a/tests/lib/path.py
+++ b/tests/lib/path.py
@@ -22,8 +22,8 @@ class Path(str):
 
     def __new__(cls, *paths):
         if len(paths):
-            return str.__new__(cls, os.path.join(*paths))
-        return str.__new__(cls)
+            return super().__new__(cls, os.path.join(*paths))
+        return super().__new__(cls)
 
     def __div__(self, path):
         """
@@ -73,9 +73,6 @@ class Path(str):
     def __repr__(self):
         return "Path({inner})".format(inner=str.__repr__(self))
 
-    def __hash__(self):
-        return str.__hash__(self)
-
     @property
     def name(self):
         """
@@ -160,7 +157,7 @@ class Path(str):
 
     # TODO: Remove after removing inheritance from str.
     def join(self, *parts):
-        raise RuntimeError('Path.join is invalid, use joinpath instead.')
+        raise RuntimeError("Path.join is invalid, use joinpath instead.")
 
     def read_bytes(self):
         # type: () -> bytes
@@ -191,4 +188,5 @@ class Path(str):
     def stat(self):
         return os.stat(self)
 
+
 curdir = Path(os.path.curdir)
diff --git a/tests/lib/requests_mocks.py b/tests/lib/requests_mocks.py
index b8ae2d232..5db3970cb 100644
--- a/tests/lib/requests_mocks.py
+++ b/tests/lib/requests_mocks.py
@@ -5,7 +5,6 @@ from io import BytesIO
 
 
 class FakeStream:
-
     def __init__(self, contents):
         self._io = BytesIO(contents)
 
@@ -20,7 +19,6 @@ class FakeStream:
 
 
 class MockResponse:
-
     def __init__(self, contents):
         self.raw = FakeStream(contents)
         self.content = contents
@@ -29,12 +27,11 @@ class MockResponse:
         self.status_code = 200
         self.connection = None
         self.url = None
-        self.headers = {'Content-Length': len(contents)}
+        self.headers = {"Content-Length": len(contents)}
         self.history = []
 
 
 class MockConnection:
-
     def _send(self, req, **kwargs):
         raise NotImplementedError("_send must be overridden for tests")
 
@@ -46,7 +43,6 @@ class MockConnection:
 
 
 class MockRequest:
-
     def __init__(self, url):
         self.url = url
         self.headers = {}
diff --git a/tests/lib/server.py b/tests/lib/server.py
index caaa3ffec..6db46d166 100644
--- a/tests/lib/server.py
+++ b/tests/lib/server.py
@@ -34,10 +34,10 @@ if not hasattr(signal, "pthread_sigmask"):
     # practice.
     blocked_signals = nullcontext
 else:
+
     @contextmanager
     def blocked_signals():
-        """Block all signals for e.g. starting a worker thread.
-        """
+        """Block all signals for e.g. starting a worker thread."""
         # valid_signals() was added in Python 3.8 (and not using it results
         # in a warning on pthread_sigmask() call)
         try:
@@ -82,12 +82,13 @@ def _mock_wsgi_adapter(mock):
     """Uses a mock to record function arguments and provide
     the actual function that should respond.
     """
+
     def adapter(environ, start_response):
         # type: (Environ, StartResponse) -> Body
         try:
             responder = mock(environ, start_response)
         except StopIteration:
-            raise RuntimeError('Ran out of mocked responses.')
+            raise RuntimeError("Ran out of mocked responses.")
         return responder(environ, start_response)
 
     return adapter
@@ -136,8 +137,7 @@ def make_mock_server(**kwargs):
 @contextmanager
 def server_running(server):
     # type: (BaseWSGIServer) -> None
-    """Context manager for running the provided server in a separate thread.
-    """
+    """Context manager for running the provided server in a separate thread."""
     thread = threading.Thread(target=server.serve_forever)
     thread.daemon = True
     with blocked_signals():
@@ -156,45 +156,50 @@ def text_html_response(text):
     # type: (str) -> Responder
     def responder(environ, start_response):
         # type: (Environ, StartResponse) -> Body
-        start_response("200 OK", [
-            ("Content-Type", "text/html; charset=UTF-8"),
-        ])
-        return [text.encode('utf-8')]
+        start_response(
+            "200 OK",
+            [
+                ("Content-Type", "text/html; charset=UTF-8"),
+            ],
+        )
+        return [text.encode("utf-8")]
 
     return responder
 
 
 def html5_page(text):
     # type: (str) -> str
-    return dedent("""
+    return (
+        dedent(
+            """
     
     
       
         {}
       
     
-    """).strip().format(text)
+    """
+        )
+        .strip()
+        .format(text)
+    )
 
 
 def index_page(spec):
     # type: (Dict[str, str]) -> Responder
     def link(name, value):
-        return '<a href="{}">{}</a>'.format(
-            value, name
-        )
+        return '<a href="{}">{}</a>'.format(value, name)
 
-    links = ''.join(link(*kv) for kv in spec.items())
+    links = "".join(link(*kv) for kv in spec.items())
     return text_html_response(html5_page(links))
 
 
 def package_page(spec):
     # type: (Dict[str, str]) -> Responder
     def link(name, value):
-        return '<a href="{}">{}</a>'.format(
-            value, name
-        )
+        return '<a href="{}">{}</a>'.format(value, name)
 
-    links = ''.join(link(*kv) for kv in spec.items())
+    links = "".join(link(*kv) for kv in spec.items())
     return text_html_response(html5_page(links))
 
 
@@ -204,13 +209,14 @@ def file_response(path):
         # type: (Environ, StartResponse) -> Body
         size = os.stat(path).st_size
         start_response(
-            "200 OK", [
+            "200 OK",
+            [
                 ("Content-Type", "application/octet-stream"),
                 ("Content-Length", str(size)),
             ],
         )
 
-        with open(path, 'rb') as f:
+        with open(path, "rb") as f:
             return [f.read()]
 
     return responder
@@ -223,22 +229,24 @@ def authorization_response(path):
     def responder(environ, start_response):
         # type: (Environ, StartResponse) -> Body
 
-        if environ.get('HTTP_AUTHORIZATION') == correct_auth:
+        if environ.get("HTTP_AUTHORIZATION") == correct_auth:
             size = os.stat(path).st_size
             start_response(
-                "200 OK", [
+                "200 OK",
+                [
                     ("Content-Type", "application/octet-stream"),
                     ("Content-Length", str(size)),
                 ],
             )
         else:
             start_response(
-                "401 Unauthorized", [
+                "401 Unauthorized",
+                [
                     ("WWW-Authenticate", "Basic"),
                 ],
             )
 
-        with open(path, 'rb') as f:
+        with open(path, "rb") as f:
             return [f.read()]
 
     return responder
diff --git a/tests/lib/test_lib.py b/tests/lib/test_lib.py
index 47b97724f..54e8764f0 100644
--- a/tests/lib/test_lib.py
+++ b/tests/lib/test_lib.py
@@ -18,9 +18,7 @@ def assert_error_startswith(exc_type, expected_start):
     with pytest.raises(exc_type) as err:
         yield
 
-    assert str(err.value).startswith(expected_start), (
-        f'full message: {err.value}'
-    )
+    assert str(err.value).startswith(expected_start), f"full message: {err.value}"
 
 
 def test_tmp_dir_exists_in_env(script):
@@ -31,7 +29,7 @@ def test_tmp_dir_exists_in_env(script):
     # need these tests to ensure the assert_no_temp feature of scripttest is
     # working
     script.assert_no_temp()  # this fails if env.tmp_path doesn't exist
-    assert script.environ['TMPDIR'] == script.temp_path
+    assert script.environ["TMPDIR"] == script.temp_path
     assert isdir(script.temp_path)
 
 
@@ -41,16 +39,16 @@ def test_correct_pip_version(script):
     """
     # output is like:
     # pip PIPVERSION from PIPDIRECTORY (python PYVERSION)
-    result = script.pip('--version')
+    result = script.pip("--version")
 
     # compare the directory tree of the invoked pip with that of this source
     # distribution
     pip_folder_outputed = re.match(
-        r'pip \d+(\.[\d]+)+(\.?(b|rc|dev|pre|post)\d+)? from (.*) '
-        r'\(python \d(.[\d])+\)$',
-        result.stdout
+        r"pip \d+(\.[\d]+)+(\.?(b|rc|dev|pre|post)\d+)? from (.*) "
+        r"\(python \d(.[\d])+\)$",
+        result.stdout,
     ).group(4)
-    pip_folder = join(SRC_DIR, 'src', 'pip')
+    pip_folder = join(SRC_DIR, "src", "pip")
 
     diffs = filecmp.dircmp(pip_folder, pip_folder_outputed)
 
@@ -59,32 +57,33 @@ def test_correct_pip_version(script):
     # primary resources other than .py files, this code will need
     # maintenance
     mismatch_py = [
-        x for x in diffs.left_only + diffs.right_only + diffs.diff_files
-        if x.endswith('.py')
+        x
+        for x in diffs.left_only + diffs.right_only + diffs.diff_files
+        if x.endswith(".py")
     ]
     assert not mismatch_py, (
-        'mismatched source files in {pip_folder!r} '
-        'and {pip_folder_outputed!r}: {mismatch_py!r}'.format(**locals())
+        f"mismatched source files in {pip_folder!r} "
+        f"and {pip_folder_outputed!r}: {mismatch_py!r}"
     )
 
 
 def test_as_import(script):
-    """ test that pip.__init__.py does not shadow
+    """test that pip.__init__.py does not shadow
     the command submodule with a dictionary
     """
     import pip._internal.commands.install as inst
+
     assert inst is not None
 
 
 class TestPipTestEnvironment:
-
     def run_stderr_with_prefix(self, script, prefix, **kwargs):
         """
         Call run() that prints stderr with the given prefix.
         """
-        text = f'{prefix}: hello, world\\n'
+        text = f"{prefix}: hello, world\\n"
         command = f'import sys; sys.stderr.write("{text}")'
-        args = [sys.executable, '-c', command]
+        args = [sys.executable, "-c", command]
         script.run(*args, **kwargs)
 
     def run_with_log_command(self, script, sub_string, **kwargs):
@@ -96,14 +95,17 @@ class TestPipTestEnvironment:
             "import logging; logging.basicConfig(level='INFO'); "
             "logging.getLogger().info('sub: {}', 'foo')"
         ).format(sub_string)
-        args = [sys.executable, '-c', command]
+        args = [sys.executable, "-c", command]
         script.run(*args, **kwargs)
 
-    @pytest.mark.parametrize('prefix', (
-        'DEBUG',
-        'INFO',
-        'FOO',
-    ))
+    @pytest.mark.parametrize(
+        "prefix",
+        (
+            "DEBUG",
+            "INFO",
+            "FOO",
+        ),
+    )
     def test_run__allowed_stderr(self, script, prefix):
         """
         Test calling run() with allowed stderr.
@@ -117,21 +119,28 @@ class TestPipTestEnvironment:
         """
         # Check that no error happens.
         self.run_stderr_with_prefix(
-            script, 'WARNING', allow_stderr_warning=True,
+            script,
+            "WARNING",
+            allow_stderr_warning=True,
         )
 
         # Check that an error still happens with ERROR.
-        expected_start = 'stderr has an unexpected error'
+        expected_start = "stderr has an unexpected error"
         with assert_error_startswith(RuntimeError, expected_start):
             self.run_stderr_with_prefix(
-                script, 'ERROR', allow_stderr_warning=True,
+                script,
+                "ERROR",
+                allow_stderr_warning=True,
             )
 
-    @pytest.mark.parametrize('prefix', (
-        'DEPRECATION',
-        'WARNING',
-        'ERROR',
-    ))
+    @pytest.mark.parametrize(
+        "prefix",
+        (
+            "DEPRECATION",
+            "WARNING",
+            "ERROR",
+        ),
+    )
     def test_run__allow_stderr_error(self, script, prefix):
         """
         Test passing allow_stderr_error=True.
@@ -139,11 +148,14 @@ class TestPipTestEnvironment:
         # Check that no error happens.
         self.run_stderr_with_prefix(script, prefix, allow_stderr_error=True)
 
-    @pytest.mark.parametrize('prefix, expected_start', (
-        ('DEPRECATION', 'stderr has an unexpected warning'),
-        ('WARNING', 'stderr has an unexpected warning'),
-        ('ERROR', 'stderr has an unexpected error'),
-    ))
+    @pytest.mark.parametrize(
+        "prefix, expected_start",
+        (
+            ("DEPRECATION", "stderr has an unexpected warning"),
+            ("WARNING", "stderr has an unexpected warning"),
+            ("ERROR", "stderr has an unexpected error"),
+        ),
+    )
     def test_run__unexpected_stderr(self, script, prefix, expected_start):
         """
         Test calling run() with unexpected stderr output.
@@ -156,70 +168,72 @@ class TestPipTestEnvironment:
         Test calling run() with an unexpected logging error.
         """
         # Pass a good substitution string.
-        self.run_with_log_command(script, sub_string='%r')
+        self.run_with_log_command(script, sub_string="%r")
 
-        expected_start = 'stderr has a logging error, which is never allowed'
+        expected_start = "stderr has a logging error, which is never allowed"
         with assert_error_startswith(RuntimeError, expected_start):
             # Pass a bad substitution string.  Also, pass
             # allow_stderr_error=True to check that the RuntimeError occurs
             # even under the stricter test condition of when we are allowing
             # other types of errors.
             self.run_with_log_command(
-                script, sub_string='{!r}', allow_stderr_error=True,
+                script,
+                sub_string="{!r}",
+                allow_stderr_error=True,
             )
 
     def test_run__allow_stderr_error_false_error_with_expect_error(
-        self, script,
+        self,
+        script,
     ):
         """
         Test passing allow_stderr_error=False with expect_error=True.
         """
-        expected_start = (
-            'cannot pass allow_stderr_error=False with expect_error=True'
-        )
+        expected_start = "cannot pass allow_stderr_error=False with expect_error=True"
         with assert_error_startswith(RuntimeError, expected_start):
-            script.run('python', allow_stderr_error=False, expect_error=True)
+            script.run("python", allow_stderr_error=False, expect_error=True)
 
     def test_run__allow_stderr_warning_false_error_with_expect_stderr(
-        self, script,
+        self,
+        script,
     ):
         """
         Test passing allow_stderr_warning=False with expect_stderr=True.
         """
         expected_start = (
-            'cannot pass allow_stderr_warning=False with expect_stderr=True'
+            "cannot pass allow_stderr_warning=False with expect_stderr=True"
         )
         with assert_error_startswith(RuntimeError, expected_start):
             script.run(
-                'python', allow_stderr_warning=False, expect_stderr=True,
+                "python",
+                allow_stderr_warning=False,
+                expect_stderr=True,
             )
 
-    @pytest.mark.parametrize('arg_name', (
-        'expect_error',
-        'allow_stderr_error',
-    ))
+    @pytest.mark.parametrize(
+        "arg_name",
+        (
+            "expect_error",
+            "allow_stderr_error",
+        ),
+    )
     def test_run__allow_stderr_warning_false_error(self, script, arg_name):
         """
         Test passing allow_stderr_warning=False when it is not allowed.
         """
-        kwargs = {'allow_stderr_warning': False, arg_name: True}
+        kwargs = {"allow_stderr_warning": False, arg_name: True}
         expected_start = (
-            'cannot pass allow_stderr_warning=False with '
-            'allow_stderr_error=True'
+            "cannot pass allow_stderr_warning=False with " "allow_stderr_error=True"
         )
         with assert_error_startswith(RuntimeError, expected_start):
-            script.run('python', **kwargs)
+            script.run("python", **kwargs)
 
     def test_run__expect_error_fails_when_zero_returncode(self, script):
-        expected_start = 'Script passed unexpectedly'
+        expected_start = "Script passed unexpectedly"
         with assert_error_startswith(AssertionError, expected_start):
-            script.run(
-                'python', expect_error=True
-            )
+            script.run("python", expect_error=True)
 
     def test_run__no_expect_error_fails_when_nonzero_returncode(self, script):
-        expected_start = 'Script returned code: 1'
+        expected_start = "Script returned code: 1"
         with assert_error_startswith(AssertionError, expected_start):
-            script.run(
-                'python', '-c', 'import sys; sys.exit(1)'
-            )
+            script.run("python", "-c", "import sys; sys.exit(1)")
diff --git a/tests/lib/test_wheel.py b/tests/lib/test_wheel.py
index 835ad31ec..579d48660 100644
--- a/tests/lib/test_wheel.py
+++ b/tests/lib/test_wheel.py
@@ -161,19 +161,20 @@ def test_make_wheel_default_record():
         record_bytes = z.read("simple-0.1.0.dist-info/RECORD")
         record_text = record_bytes.decode()
         record_rows = list(csv.reader(record_text.splitlines()))
-        records = {
-            row[0]: row[1:] for row in record_rows
-        }
+        records = {row[0]: row[1:] for row in record_rows}
 
         expected = {
             "simple/__init__.py": [
-                "sha256=ypeBEsobvcr6wjGzmiPcTaeG7_gUfE5yuYB3ha_uSLs", "1"
+                "sha256=ypeBEsobvcr6wjGzmiPcTaeG7_gUfE5yuYB3ha_uSLs",
+                "1",
             ],
             "simple-0.1.0.data/purelib/info.txt": [
-                "sha256=Ln0sA6lQeuJl7PW1NWiFpTOTogKdJBOUmXJloaJa78Y", "1"
+                "sha256=Ln0sA6lQeuJl7PW1NWiFpTOTogKdJBOUmXJloaJa78Y",
+                "1",
             ],
             "simple-0.1.0.dist-info/LICENSE": [
-                "sha256=PiPoFgA5WUoziU9lZOGxNIu9egCI1CxKy3PurtWcAJ0", "1"
+                "sha256=PiPoFgA5WUoziU9lZOGxNIu9egCI1CxKy3PurtWcAJ0",
+                "1",
             ],
             "simple-0.1.0.dist-info/RECORD": ["", ""],
         }
diff --git a/tests/lib/venv.py b/tests/lib/venv.py
index 6dbdb4dc7..bbf6eaa38 100644
--- a/tests/lib/venv.py
+++ b/tests/lib/venv.py
@@ -17,9 +17,9 @@ class VirtualEnvironment:
 
     def __init__(self, location, template=None, venv_type=None):
         assert template is None or venv_type is None
-        assert venv_type in (None, 'virtualenv', 'venv')
+        assert venv_type in (None, "virtualenv", "venv")
         self.location = Path(location)
-        self._venv_type = venv_type or template._venv_type or 'virtualenv'
+        self._venv_type = venv_type or template._venv_type or "virtualenv"
         self._user_site_packages = False
         self._template = template
         self._sitecustomize = None
@@ -29,11 +29,11 @@ class VirtualEnvironment:
     def _update_paths(self):
         home, lib, inc, bin = _virtualenv.path_locations(self.location)
         self.bin = Path(bin)
-        self.site = Path(lib) / 'site-packages'
+        self.site = Path(lib) / "site-packages"
         # Workaround for https://github.com/pypa/virtualenv/issues/306
         if hasattr(sys, "pypy_version_info"):
-            version_dir = '{0}'.format(*sys.version_info)
-            self.lib = Path(home, 'lib-python', version_dir)
+            version_dir = str(sys.version_info.major)
+            self.lib = Path(home, "lib-python", version_dir)
         else:
             self.lib = Path(lib)
 
@@ -46,17 +46,15 @@ class VirtualEnvironment:
         if self._template:
             # On Windows, calling `_virtualenv.path_locations(target)`
             # will have created the `target` directory...
-            if sys.platform == 'win32' and self.location.exists():
+            if sys.platform == "win32" and self.location.exists():
                 self.location.rmdir()
             # Clone virtual environment from template.
-            shutil.copytree(
-                self._template.location, self.location, symlinks=True
-            )
+            shutil.copytree(self._template.location, self.location, symlinks=True)
             self._sitecustomize = self._template.sitecustomize
             self._user_site_packages = self._template.user_site_packages
         else:
             # Create a new virtual environment.
-            if self._venv_type == 'virtualenv':
+            if self._venv_type == "virtualenv":
                 _virtualenv.create_environment(
                     self.location,
                     no_pip=True,
@@ -64,7 +62,7 @@ class VirtualEnvironment:
                     no_setuptools=True,
                 )
                 self._fix_virtualenv_site_module()
-            elif self._venv_type == 'venv':
+            elif self._venv_type == "venv":
                 builder = _venv.EnvBuilder()
                 context = builder.ensure_directories(self.location)
                 builder.create_configuration(context)
@@ -75,46 +73,44 @@ class VirtualEnvironment:
 
     def _fix_virtualenv_site_module(self):
         # Patch `site.py` so user site work as expected.
-        site_py = self.lib / 'site.py'
+        site_py = self.lib / "site.py"
         with open(site_py) as fp:
             site_contents = fp.read()
         for pattern, replace in (
             (
                 # Ensure enabling user site does not result in adding
                 # the real site-packages' directory to `sys.path`.
+                ("\ndef virtual_addsitepackages(known_paths):\n"),
                 (
-                    '\ndef virtual_addsitepackages(known_paths):\n'
-                ),
-                (
-                    '\ndef virtual_addsitepackages(known_paths):\n'
-                    '    return known_paths\n'
+                    "\ndef virtual_addsitepackages(known_paths):\n"
+                    "    return known_paths\n"
                 ),
             ),
             (
                 # Fix sites ordering: user site must be added before system.
                 (
-                    '\n    paths_in_sys = addsitepackages(paths_in_sys)'
-                    '\n    paths_in_sys = addusersitepackages(paths_in_sys)\n'
+                    "\n    paths_in_sys = addsitepackages(paths_in_sys)"
+                    "\n    paths_in_sys = addusersitepackages(paths_in_sys)\n"
                 ),
                 (
-                    '\n    paths_in_sys = addusersitepackages(paths_in_sys)'
-                    '\n    paths_in_sys = addsitepackages(paths_in_sys)\n'
+                    "\n    paths_in_sys = addusersitepackages(paths_in_sys)"
+                    "\n    paths_in_sys = addsitepackages(paths_in_sys)\n"
                 ),
             ),
         ):
             assert pattern in site_contents
             site_contents = site_contents.replace(pattern, replace)
-        with open(site_py, 'w') as fp:
+        with open(site_py, "w") as fp:
             fp.write(site_contents)
         # Make sure bytecode is up-to-date too.
         assert compileall.compile_file(str(site_py), quiet=1, force=True)
 
     def _customize_site(self):
-        contents = ''
-        if self._venv_type == 'venv':
+        contents = ""
+        if self._venv_type == "venv":
             # Enable user site (before system).
             contents += textwrap.dedent(
-                '''
+                """
                 import os, site, sys
 
                 if not os.environ.get('PYTHONNOUSERSITE', False):
@@ -138,9 +134,10 @@ class VirtualEnvironment:
                     # Third, add back system-sites related paths.
                     for path in site.getsitepackages():
                         site.addsitedir(path)
-                ''').strip()
+                """
+            ).strip()
         if self._sitecustomize is not None:
-            contents += '\n' + self._sitecustomize
+            contents += "\n" + self._sitecustomize
         sitecustomize = self.site / "sitecustomize.py"
         sitecustomize.write_text(contents)
         # Make sure bytecode is up-to-date too.
@@ -170,11 +167,11 @@ class VirtualEnvironment:
     @user_site_packages.setter
     def user_site_packages(self, value):
         self._user_site_packages = value
-        if self._venv_type == 'virtualenv':
+        if self._venv_type == "virtualenv":
             marker = self.lib / "no-global-site-packages.txt"
             if self._user_site_packages:
                 marker.unlink()
             else:
                 marker.touch()
-        elif self._venv_type == 'venv':
+        elif self._venv_type == "venv":
             self._customize_site()
diff --git a/tests/lib/wheel.py b/tests/lib/wheel.py
index e88ce8c61..bfcdc9d27 100644
--- a/tests/lib/wheel.py
+++ b/tests/lib/wheel.py
@@ -30,9 +30,7 @@ from tests.lib.path import Path
 
 # path, digest, size
 RecordLike = Tuple[str, str, str]
-RecordCallback = Callable[
-    [List["Record"]], Union[str, bytes, List[RecordLike]]
-]
+RecordCallback = Callable[[List["Record"]], Union[str, bytes, List[RecordLike]]]
 # As would be used in metadata
 HeaderValue = Union[str, List[str]]
 
@@ -97,11 +95,13 @@ def make_metadata_file(
     if value is not _default:
         return File(path, ensure_binary(value))
 
-    metadata = CaseInsensitiveDict({
-        "Metadata-Version": "2.1",
-        "Name": name,
-        "Version": version,
-    })
+    metadata = CaseInsensitiveDict(
+        {
+            "Metadata-Version": "2.1",
+            "Name": name,
+            "Version": version,
+        }
+    )
     if updates is not _default:
         metadata.update(updates)
 
@@ -128,12 +128,14 @@ def make_wheel_metadata_file(
     if value is not _default:
         return File(path, ensure_binary(value))
 
-    metadata = CaseInsensitiveDict({
-        "Wheel-Version": "1.0",
-        "Generator": "pip-test-suite",
-        "Root-Is-Purelib": "true",
-        "Tag": ["-".join(parts) for parts in tags],
-    })
+    metadata = CaseInsensitiveDict(
+        {
+            "Wheel-Version": "1.0",
+            "Generator": "pip-test-suite",
+            "Root-Is-Purelib": "true",
+            "Tag": ["-".join(parts) for parts in tags],
+        }
+    )
 
     if updates is not _default:
         metadata.update(updates)
@@ -172,10 +174,7 @@ def make_entry_points_file(
 
 def make_files(files):
     # type: (Dict[str, AnyStr]) -> List[File]
-    return [
-        File(name, ensure_binary(contents))
-        for name, contents in files.items()
-    ]
+    return [File(name, ensure_binary(contents)) for name, contents in files.items()]
 
 
 def make_metadata_files(name, version, files):
@@ -203,9 +202,7 @@ def urlsafe_b64encode_nopad(data):
 
 def digest(contents):
     # type: (bytes) -> str
-    return "sha256={}".format(
-        urlsafe_b64encode_nopad(sha256(contents).digest())
-    )
+    return "sha256={}".format(urlsafe_b64encode_nopad(sha256(contents).digest()))
 
 
 def record_file_maker_wrapper(
@@ -219,9 +216,7 @@ def record_file_maker_wrapper(
     records = []  # type: List[Record]
     for file in files:
         records.append(
-            Record(
-                file.name, digest(file.contents), str(len(file.contents))
-            )
+            Record(file.name, digest(file.contents), str(len(file.contents)))
         )
         yield file
 
@@ -250,19 +245,20 @@ def record_file_maker_wrapper(
 
 def wheel_name(name, version, pythons, abis, platforms):
     # type: (str, str, str, str, str) -> str
-    stem = "-".join([
-        name,
-        version,
-        ".".join(pythons),
-        ".".join(abis),
-        ".".join(platforms),
-    ])
+    stem = "-".join(
+        [
+            name,
+            version,
+            ".".join(pythons),
+            ".".join(abis),
+            ".".join(platforms),
+        ]
+    )
     return f"{stem}.whl"
 
 
 class WheelBuilder:
-    """A wheel that can be saved or converted to several formats.
-    """
+    """A wheel that can be saved or converted to several formats."""
 
     def __init__(self, name, files):
         # type: (str, List[File]) -> None
@@ -390,9 +386,7 @@ def make_wheel(
     tags = list(itertools.product(pythons, abis, platforms))
 
     possible_files = [
-        make_metadata_file(
-            name, version, metadata, metadata_updates, metadata_body
-        ),
+        make_metadata_file(name, version, metadata, metadata_updates, metadata_body),
         make_wheel_metadata_file(
             name, version, wheel_metadata, tags, wheel_metadata_updates
         ),
@@ -403,9 +397,7 @@ def make_wheel(
         possible_files.extend(make_files(extra_files))
 
     if extra_metadata_files is not _default:
-        possible_files.extend(
-            make_metadata_files(name, version, extra_metadata_files)
-        )
+        possible_files.extend(make_metadata_files(name, version, extra_metadata_files))
 
     if extra_data_files is not _default:
         possible_files.extend(make_data_files(name, version, extra_data_files))
diff --git a/tests/unit/test_collector.py b/tests/unit/test_collector.py
index 87c947e90..059fbc719 100644
--- a/tests/unit/test_collector.py
+++ b/tests/unit/test_collector.py
@@ -608,7 +608,7 @@ def test_group_locations__file_expand_dir(data):
     files, urls = group_locations([data.find_links], expand_dir=True)
     assert files and not urls, (
         "files and not urls should have been found "
-        "at find-links url: {data.find_links}".format(**locals())
+        f"at find-links url: {data.find_links}"
     )
 
 
diff --git a/tests/unit/test_compat.py b/tests/unit/test_compat.py
index 655e45ab7..2d7cbf5c3 100644
--- a/tests/unit/test_compat.py
+++ b/tests/unit/test_compat.py
@@ -1,11 +1,8 @@
-import locale
 import os
-import sys
 
 import pytest
 
-import pip._internal.utils.compat as pip_compat
-from pip._internal.utils.compat import console_to_str, get_path_uid, str_to_display
+from pip._internal.utils.compat import get_path_uid
 
 
 def test_get_path_uid():
@@ -44,81 +41,3 @@ def test_get_path_uid_symlink_without_NOFOLLOW(tmpdir, monkeypatch):
     os.symlink(f, fs)
     with pytest.raises(OSError):
         get_path_uid(fs)
-
-
-@pytest.mark.parametrize('data, expected', [
-    ('abc', 'abc'),
-    # Test text input with non-ascii characters.
-    ('déf', 'déf'),
-])
-def test_str_to_display(data, expected):
-    actual = str_to_display(data)
-    assert actual == expected, (
-        # Show the encoding for easier troubleshooting.
-        f'encoding: {locale.getpreferredencoding()!r}'
-    )
-
-
-@pytest.mark.parametrize('data, encoding, expected', [
-    # Test str input with non-ascii characters.
-    ('déf', 'utf-8', 'déf'),
-    # Test bytes input with non-ascii characters:
-    ('déf'.encode('utf-8'), 'utf-8', 'déf'),
-    # Test a Windows encoding.
-    ('déf'.encode('cp1252'), 'cp1252', 'déf'),
-    # Test a Windows encoding with incompatibly encoded text.
-    ('déf'.encode('utf-8'), 'cp1252', 'déf'),
-])
-def test_str_to_display__encoding(monkeypatch, data, encoding, expected):
-    monkeypatch.setattr(locale, 'getpreferredencoding', lambda: encoding)
-    actual = str_to_display(data)
-    assert actual == expected, (
-        # Show the encoding for easier troubleshooting.
-        f'encoding: {locale.getpreferredencoding()!r}'
-    )
-
-
-def test_str_to_display__decode_error(monkeypatch, caplog):
-    monkeypatch.setattr(locale, 'getpreferredencoding', lambda: 'utf-8')
-    # Encode with an incompatible encoding.
-    data = 'ab'.encode('utf-16')
-    actual = str_to_display(data)
-    # Keep the expected value endian safe
-    if sys.byteorder == "little":
-        expected = "\\xff\\xfea\x00b\x00"
-    elif sys.byteorder == "big":
-        expected = "\\xfe\\xff\x00a\x00b"
-
-    assert actual == expected, (
-        # Show the encoding for easier troubleshooting.
-        f'encoding: {locale.getpreferredencoding()!r}'
-    )
-    assert len(caplog.records) == 1
-    record = caplog.records[0]
-    assert record.levelname == 'WARNING'
-    assert record.message == (
-        'Bytes object does not appear to be encoded as utf-8'
-    )
-
-
-def test_console_to_str(monkeypatch):
-    some_bytes = b"a\xE9\xC3\xE9b"
-    encodings = ('ascii', 'utf-8', 'iso-8859-1', 'iso-8859-5',
-                 'koi8_r', 'cp850')
-    for e in encodings:
-        monkeypatch.setattr(locale, 'getpreferredencoding', lambda: e)
-        result = console_to_str(some_bytes)
-        assert result.startswith("a")
-        assert result.endswith("b")
-
-
-def test_console_to_str_warning(monkeypatch):
-    some_bytes = b"a\xE9b"
-
-    def check_warning(msg, *args, **kwargs):
-        assert 'does not appear to be encoded as' in msg
-        assert args[0] == 'Subprocess output'
-
-    monkeypatch.setattr(locale, 'getpreferredencoding', lambda: 'utf-8')
-    monkeypatch.setattr(pip_compat.logger, 'warning', check_warning)
-    console_to_str(some_bytes)
diff --git a/tests/unit/test_finder.py b/tests/unit/test_finder.py
index 162a9b356..9638199fb 100644
--- a/tests/unit/test_finder.py
+++ b/tests/unit/test_finder.py
@@ -209,6 +209,8 @@ class TestWheel:
         with pytest.raises(BestVersionAlreadyInstalled):
             finder.find_requirement(req, True)
 
+
+class TestCandidateEvaluator:
     def test_link_sorting(self):
         """
         Test link sorting
@@ -249,7 +251,8 @@ class TestWheel:
         results = sorted(links, key=sort_key, reverse=True)
         results2 = sorted(reversed(links), key=sort_key, reverse=True)
 
-        assert links == results == results2, results2
+        assert links == results, results
+        assert links == results2, results2
 
     def test_link_sorting_wheels_with_build_tags(self):
         """Verify build tags affect sorting."""
@@ -274,7 +277,47 @@ class TestWheel:
         sort_key = candidate_evaluator._sort_key
         results = sorted(links, key=sort_key, reverse=True)
         results2 = sorted(reversed(links), key=sort_key, reverse=True)
-        assert links == results == results2, results2
+
+        assert links == results, results
+        assert links == results2, results2
+
+    def test_build_tag_is_less_important_than_other_tags(self):
+        links = [
+            InstallationCandidate(
+                "simple",
+                "1.0",
+                Link("simple-1.0-1-py3-abi3-linux_x86_64.whl"),
+            ),
+            InstallationCandidate(
+                "simple",
+                "1.0",
+                Link("simple-1.0-2-py3-abi3-linux_i386.whl"),
+            ),
+            InstallationCandidate(
+                "simple",
+                "1.0",
+                Link("simple-1.0-2-py3-any-none.whl"),
+            ),
+            InstallationCandidate(
+                "simple",
+                "1.0",
+                Link("simple-1.0.tar.gz"),
+            ),
+        ]
+        valid_tags = [
+            Tag("py3", "abi3", "linux_x86_64"),
+            Tag("py3", "abi3", "linux_i386"),
+            Tag("py3", "any", "none"),
+        ]
+        evaluator = CandidateEvaluator(
+            "my-project", supported_tags=valid_tags, specifier=SpecifierSet(),
+        )
+        sort_key = evaluator._sort_key
+        results = sorted(links, key=sort_key, reverse=True)
+        results2 = sorted(reversed(links), key=sort_key, reverse=True)
+
+        assert links == results, results
+        assert links == results2, results2
 
 
 def test_finder_priority_file_over_page(data):
diff --git a/tests/unit/test_locations.py b/tests/unit/test_locations.py
index 3d4ec9462..067f4e844 100644
--- a/tests/unit/test_locations.py
+++ b/tests/unit/test_locations.py
@@ -11,7 +11,7 @@ from unittest.mock import Mock
 
 import pytest
 
-from pip._internal.locations import distutils_scheme
+from pip._internal.locations import SCHEME_KEYS, get_scheme
 
 if sys.platform == 'win32':
     pwd = Mock()
@@ -19,6 +19,11 @@ else:
     import pwd
 
 
+def _get_scheme_dict(*args, **kwargs):
+    scheme = get_scheme(*args, **kwargs)
+    return {k: getattr(scheme, k) for k in SCHEME_KEYS}
+
+
 class TestLocations:
     def setup(self):
         self.tempdir = tempfile.mkdtemp()
@@ -83,8 +88,8 @@ class TestDistutilsScheme:
         # root is c:\somewhere\else or /somewhere/else
         root = os.path.normcase(os.path.abspath(
             os.path.join(os.path.sep, 'somewhere', 'else')))
-        norm_scheme = distutils_scheme("example")
-        root_scheme = distutils_scheme("example", root=root)
+        norm_scheme = _get_scheme_dict("example")
+        root_scheme = _get_scheme_dict("example", root=root)
 
         for key, value in norm_scheme.items():
             drive, path = os.path.splitdrive(os.path.abspath(value))
@@ -107,7 +112,7 @@ class TestDistutilsScheme:
             'find_config_files',
             lambda self: [f],
         )
-        scheme = distutils_scheme('example')
+        scheme = _get_scheme_dict('example')
         assert scheme['scripts'] == install_scripts
 
     @pytest.mark.incompatible_with_venv
@@ -129,15 +134,15 @@ class TestDistutilsScheme:
             'find_config_files',
             lambda self: [f],
         )
-        scheme = distutils_scheme('example')
+        scheme = _get_scheme_dict('example')
         assert scheme['platlib'] == install_lib + os.path.sep
         assert scheme['purelib'] == install_lib + os.path.sep
 
     def test_prefix_modifies_appropriately(self):
         prefix = os.path.abspath(os.path.join('somewhere', 'else'))
 
-        normal_scheme = distutils_scheme("example")
-        prefix_scheme = distutils_scheme("example", prefix=prefix)
+        normal_scheme = _get_scheme_dict("example")
+        prefix_scheme = _get_scheme_dict("example", prefix=prefix)
 
         def _calculate_expected(value):
             path = os.path.join(prefix, os.path.relpath(value, sys.prefix))
diff --git a/tests/unit/test_operations_prepare.py b/tests/unit/test_operations_prepare.py
index f6122cebe..4d912fb6e 100644
--- a/tests/unit/test_operations_prepare.py
+++ b/tests/unit/test_operations_prepare.py
@@ -102,7 +102,7 @@ def test_copy_source_tree(clean_project, tmpdir):
     assert expected_files == copied_files
 
 
-@pytest.mark.skipif("sys.platform == 'win32' or sys.version_info < (3,)")
+@pytest.mark.skipif("sys.platform == 'win32'")
 def test_copy_source_tree_with_socket(clean_project, tmpdir, caplog):
     target = tmpdir.joinpath("target")
     expected_files = get_filelist(clean_project)
@@ -121,7 +121,7 @@ def test_copy_source_tree_with_socket(clean_project, tmpdir, caplog):
     assert socket_path in record.message
 
 
-@pytest.mark.skipif("sys.platform == 'win32' or sys.version_info < (3,)")
+@pytest.mark.skipif("sys.platform == 'win32'")
 def test_copy_source_tree_with_socket_fails_with_no_socket_error(
     clean_project, tmpdir
 ):
diff --git a/tests/unit/test_req.py b/tests/unit/test_req.py
index db638659b..5f01a9ecc 100644
--- a/tests/unit/test_req.py
+++ b/tests/unit/test_req.py
@@ -89,6 +89,7 @@ class TestRequirementSet:
                 require_hashes=require_hashes,
                 use_user_site=False,
                 lazy_wheel=False,
+                in_tree_build=False,
             )
             yield Resolver(
                 preparer=preparer,
@@ -194,7 +195,7 @@ class TestRequirementSet:
         ))
         dir_path = data.packages.joinpath('FSPkg')
         reqset.add_requirement(get_processed_req_from_line(
-            'file://{dir_path}'.format(**locals()),
+            f'file://{dir_path}',
             lineno=2,
         ))
         finder = make_test_finder(find_links=[data.find_links])
@@ -254,7 +255,7 @@ class TestRequirementSet:
             (data.packages / 'simple-1.0.tar.gz').resolve())
         reqset = RequirementSet()
         reqset.add_requirement(get_processed_req_from_line(
-            '{file_url} --hash=sha256:badbad'.format(**locals()), lineno=1,
+            f'{file_url} --hash=sha256:badbad', lineno=1,
         ))
         finder = make_test_finder(find_links=[data.find_links])
         with self._basic_resolver(finder, require_hashes=True) as resolver:
@@ -466,7 +467,7 @@ class TestInstallRequirement:
         # match
         for markers in (
             'python_version >= "1.0"',
-            'sys_platform == {sys.platform!r}'.format(**globals()),
+            f'sys_platform == {sys.platform!r}',
         ):
             line = 'name; ' + markers
             req = install_req_from_line(line)
@@ -476,7 +477,7 @@ class TestInstallRequirement:
         # don't match
         for markers in (
             'python_version >= "5.0"',
-            'sys_platform != {sys.platform!r}'.format(**globals()),
+            f'sys_platform != {sys.platform!r}',
         ):
             line = 'name; ' + markers
             req = install_req_from_line(line)
@@ -487,7 +488,7 @@ class TestInstallRequirement:
         # match
         for markers in (
             'python_version >= "1.0"',
-            'sys_platform == {sys.platform!r}'.format(**globals()),
+            f'sys_platform == {sys.platform!r}',
         ):
             line = 'name; ' + markers
             req = install_req_from_line(line, comes_from='')
@@ -497,7 +498,7 @@ class TestInstallRequirement:
         # don't match
         for markers in (
             'python_version >= "5.0"',
-            'sys_platform != {sys.platform!r}'.format(**globals()),
+            f'sys_platform != {sys.platform!r}',
         ):
             line = 'name; ' + markers
             req = install_req_from_line(line, comes_from='')
@@ -507,7 +508,7 @@ class TestInstallRequirement:
     def test_extras_for_line_path_requirement(self):
         line = 'SomeProject[ex1,ex2]'
         filename = 'filename'
-        comes_from = '-r {} (line {})'.format(filename, 1)
+        comes_from = f'-r {filename} (line 1)'
         req = install_req_from_line(line, comes_from=comes_from)
         assert len(req.extras) == 2
         assert req.extras == {'ex1', 'ex2'}
@@ -515,7 +516,7 @@ class TestInstallRequirement:
     def test_extras_for_line_url_requirement(self):
         line = 'git+https://url#egg=SomeProject[ex1,ex2]'
         filename = 'filename'
-        comes_from = '-r {} (line {})'.format(filename, 1)
+        comes_from = f'-r {filename} (line 1)'
         req = install_req_from_line(line, comes_from=comes_from)
         assert len(req.extras) == 2
         assert req.extras == {'ex1', 'ex2'}
@@ -523,7 +524,7 @@ class TestInstallRequirement:
     def test_extras_for_editable_path_requirement(self):
         url = '.[ex1,ex2]'
         filename = 'filename'
-        comes_from = '-r {} (line {})'.format(filename, 1)
+        comes_from = f'-r {filename} (line 1)'
         req = install_req_from_editable(url, comes_from=comes_from)
         assert len(req.extras) == 2
         assert req.extras == {'ex1', 'ex2'}
@@ -531,7 +532,7 @@ class TestInstallRequirement:
     def test_extras_for_editable_url_requirement(self):
         url = 'git+https://url#egg=SomeProject[ex1,ex2]'
         filename = 'filename'
-        comes_from = '-r {} (line {})'.format(filename, 1)
+        comes_from = f'-r {filename} (line 1)'
         req = install_req_from_editable(url, comes_from=comes_from)
         assert len(req.extras) == 2
         assert req.extras == {'ex1', 'ex2'}
diff --git a/tests/unit/test_req_file.py b/tests/unit/test_req_file.py
index 8d61e2b6c..3c534c9ee 100644
--- a/tests/unit/test_req_file.py
+++ b/tests/unit/test_req_file.py
@@ -229,14 +229,14 @@ class TestProcessLine:
     def test_yield_line_requirement(self, line_processor):
         line = 'SomeProject'
         filename = 'filename'
-        comes_from = '-r {} (line {})'.format(filename, 1)
+        comes_from = f'-r {filename} (line 1)'
         req = install_req_from_line(line, comes_from=comes_from)
         assert repr(line_processor(line, filename, 1)[0]) == repr(req)
 
     def test_yield_pep440_line_requirement(self, line_processor):
         line = 'SomeProject @ https://url/SomeProject-py2-py3-none-any.whl'
         filename = 'filename'
-        comes_from = '-r {} (line {})'.format(filename, 1)
+        comes_from = f'-r {filename} (line 1)'
         req = install_req_from_line(line, comes_from=comes_from)
         assert repr(line_processor(line, filename, 1)[0]) == repr(req)
 
@@ -255,16 +255,16 @@ class TestProcessLine:
     ):
         line = 'SomeProject >= 2'
         filename = 'filename'
-        comes_from = '-r {} (line {})'.format(filename, 1)
+        comes_from = f'-r {filename} (line 1)'
         req = install_req_from_line(line, comes_from=comes_from)
         assert repr(line_processor(line, filename, 1)[0]) == repr(req)
         assert str(req.req.specifier) == '>=2'
 
     def test_yield_editable_requirement(self, line_processor):
         url = 'git+https://url#egg=SomeProject'
-        line = '-e {url}'.format(**locals())
+        line = f'-e {url}'
         filename = 'filename'
-        comes_from = '-r {} (line {})'.format(filename, 1)
+        comes_from = f'-r {filename} (line 1)'
         req = install_req_from_editable(url, comes_from=comes_from)
         assert repr(line_processor(line, filename, 1)[0]) == repr(req)
 
@@ -588,7 +588,7 @@ class TestParseRequirements:
         )
 
         def make_var(name):
-            return '${{{name}}}'.format(**locals())
+            return f'${{{name}}}'
 
         env_vars = collections.OrderedDict([
             ('GITHUB_TOKEN', 'notarealtoken'),
diff --git a/tests/unit/test_req_uninstall.py b/tests/unit/test_req_uninstall.py
index 3697a1f29..8de2ae9bc 100644
--- a/tests/unit/test_req_uninstall.py
+++ b/tests/unit/test_req_uninstall.py
@@ -162,9 +162,9 @@ class TestUninstallPathSet:
             pth.add(share_com)
         # Check that the paths were added to entries
         if on_windows:
-            check = set([tmpdir, relative, share, share_com])
+            check = {tmpdir, relative, share, share_com}
         else:
-            check = set([tmpdir, relative])
+            check = {tmpdir, relative}
         assert pth.entries == check
 
     @pytest.mark.skipif("sys.platform == 'win32'")
diff --git a/tests/unit/test_utils_subprocess.py b/tests/unit/test_utils_subprocess.py
index ecae2295c..7a31eeb74 100644
--- a/tests/unit/test_utils_subprocess.py
+++ b/tests/unit/test_utils_subprocess.py
@@ -57,11 +57,6 @@ def test_make_subprocess_output_error__non_ascii_command_arg(monkeypatch):
     Test a command argument with a non-ascii character.
     """
     cmd_args = ['foo', 'déf']
-    if sys.version_info[0] == 2:
-        # Check in Python 2 that the str (bytes object) with the non-ascii
-        # character has the encoding we expect. (This comes from the source
-        # code encoding at the top of the file.)
-        assert cmd_args[1].decode('utf-8') == 'déf'
 
     # We need to monkeypatch so the encoding will be correct on Windows.
     monkeypatch.setattr(locale, 'getpreferredencoding', lambda: 'utf-8')
@@ -80,7 +75,6 @@ def test_make_subprocess_output_error__non_ascii_command_arg(monkeypatch):
     assert actual == expected, f'actual: {actual}'
 
 
-@pytest.mark.skipif("sys.version_info < (3,)")
 def test_make_subprocess_output_error__non_ascii_cwd_python_3(monkeypatch):
     """
     Test a str (text) cwd with a non-ascii character in Python 3.
@@ -102,36 +96,6 @@ def test_make_subprocess_output_error__non_ascii_cwd_python_3(monkeypatch):
     assert actual == expected, f'actual: {actual}'
 
 
-@pytest.mark.parametrize('encoding', [
-    'utf-8',
-    # Test a Windows encoding.
-    'cp1252',
-])
-@pytest.mark.skipif("sys.version_info >= (3,)")
-def test_make_subprocess_output_error__non_ascii_cwd_python_2(
-    monkeypatch, encoding,
-):
-    """
-    Test a str (bytes object) cwd with a non-ascii character in Python 2.
-    """
-    cmd_args = ['test']
-    cwd = '/path/to/cwd/déf'.encode(encoding)
-    monkeypatch.setattr(sys, 'getfilesystemencoding', lambda: encoding)
-    actual = make_subprocess_output_error(
-        cmd_args=cmd_args,
-        cwd=cwd,
-        lines=[],
-        exit_status=1,
-    )
-    expected = dedent("""\
-    Command errored out with exit status 1:
-     command: test
-         cwd: /path/to/cwd/déf
-    Complete output (0 lines):
-    ----------------------------------------""")
-    assert actual == expected, f'actual: {actual}'
-
-
 # This test is mainly important for checking unicode in Python 2.
 def test_make_subprocess_output_error__non_ascii_line():
     """
@@ -430,3 +394,21 @@ class TestCallSubprocess:
                 [sys.executable, '-c', 'input()'],
                 show_stdout=True,
             )
+
+
+def test_unicode_decode_error(caplog):
+    if locale.getpreferredencoding() != "UTF-8":
+        pytest.skip("locale.getpreferredencoding() is not UTF-8")
+    caplog.set_level(INFO)
+    call_subprocess(
+        [
+            sys.executable,
+            "-c",
+            "import sys; sys.stdout.buffer.write(b'\\xff')",
+        ],
+        show_stdout=True
+    )
+
+    assert len(caplog.records) == 2
+    # First log record is "Running command ..."
+    assert caplog.record_tuples[1] == ("pip.subprocessor", INFO, "\\xff")
diff --git a/tests/unit/test_vcs.py b/tests/unit/test_vcs.py
index 8fe5d3e71..6d82e139a 100644
--- a/tests/unit/test_vcs.py
+++ b/tests/unit/test_vcs.py
@@ -303,6 +303,27 @@ def test_version_control__get_url_rev_and_auth__no_revision(url):
     assert 'an empty revision (after @)' in str(excinfo.value)
 
 
+@pytest.mark.parametrize("vcs_cls", [Bazaar, Git, Mercurial, Subversion])
+@pytest.mark.parametrize(
+    "exc_cls, msg_re",
+    [
+        (FileNotFoundError, r"Cannot find command '{name}'"),
+        (PermissionError, r"No permission to execute '{name}'"),
+    ],
+    ids=["FileNotFoundError", "PermissionError"],
+)
+def test_version_control__run_command__fails(vcs_cls, exc_cls, msg_re):
+    """
+    Test that ``VersionControl.run_command()`` raises ``BadCommand``
+    when the command is not found or when the user has no permission
+    to execute it. The error message must contain the command name.
+    """
+    with patch("pip._internal.vcs.versioncontrol.call_subprocess") as call:
+        call.side_effect = exc_cls
+        with pytest.raises(BadCommand, match=msg_re.format(name=vcs_cls.name)):
+            vcs_cls.run_command([])
+
+
 @pytest.mark.parametrize('url, expected', [
     # Test http.
     ('bzr+http://bzr.myproject.org/MyProject/trunk/#egg=MyProject',
diff --git a/tests/yaml/ERRORS.md b/tests/yaml/ERRORS.md
deleted file mode 100644
index 700e3d4ea..000000000
--- a/tests/yaml/ERRORS.md
+++ /dev/null
@@ -1,60 +0,0 @@
-# New resolver error messages
-
-
-## Incompatible requirements
-
-Most resolver error messages are due to incompatible requirements.
-That is, the dependency tree contains conflicting versions of the same
-package.  Take the example:
-
-    base:
-      available:
-        - A 1.0.0; depends B == 1.0.0, C == 2.0.0
-        - B 1.0.0; depends C == 1.0.0
-        - C 1.0.0
-        - C 2.0.0
-
-Here, `A` cannot be installed because it depends on `B` (which depends on
-a different version of `C` than `A` itself).  In real-world examples, the
-conflicting versions are not so easy to spot. I'm suggesting an error
-message which looks something like this:
-
-    A 1.0.0 -> B 1.0.0 -> C 1.0.0
-    A 1.0.0 -> C 2.0.0
-
-That is, for the conflicting package, we show the user where exactly the
-requirement came from.
-
-
-## Double requirement
-
-I've noticed that in many cases the old resolver messages are more
-informative.  For example, in the simple example:
-
-    base:
-      available:
-        - B 1.0.0
-        - B 2.0.0
-
-Now if we want to install both versions of `B` at the same time,
-i.e. the requirement `B==1.0.0 B==2.0.0`, we get:
-
-    ERROR: Could not find a version that satisfies the requirement B==1.0.0
-    ERROR: Could not find a version that satisfies the requirement B==2.0.0
-    No matching distribution found for b, b
-
-Even though both versions are actually available and satisfy each requirement,
-just not at once.  When trying to install a version of `B` which does not
-exist, say requirement `B==1.5.0`, you get the same type of error message:
-
-    Could not find a version that satisfies the requirement B==1.5.0
-    No matching distribution found for b
-
-For this case, the old error message was:
-
-    Could not find a version that satisfies the requirement B==1.5.0 (from versions: 1.0.0, 2.0.0)
-    No matching distribution found for B==1.5.0
-
-And the old error message for the requirement `B==1.0.0 B==2.0.0`:
-
-    Double requirement given: B==2.0.0 (already in B==1.0.0, name='B')
diff --git a/tests/yaml/README.md b/tests/yaml/README.md
deleted file mode 100644
index 1a379fdcb..000000000
--- a/tests/yaml/README.md
+++ /dev/null
@@ -1,74 +0,0 @@
-# YAML tests for pip's resolver
-
-This directory contains fixtures for testing pip's resolver.
-The fixtures are written as `.yml` files, with a convenient format
-that allows for specifying a custom index for temporary use.
-
-The `.yml` files are typically organized in the following way.  Here, we are
-going to take a closer look at the `simple.yml` file and step through the
-test cases.  A `base` section defines which packages are available upstream:
-
-    base:
-      available:
-        - simple 0.1.0
-        - simple 0.2.0
-        - base 0.1.0; depends dep
-        - dep 0.1.0
-
-Each package has a name and version number.  Here, there are two
-versions of the package `simple` (`0.1.0` and `0.2.0`).  The package
-`base 0.1.0` depends on the requirement `dep` (which simply means it
-depends on any version of `dep`).  More generally, a package can also
-depend on a specific version of another package, or a range of versions.
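-
-For instance, a hypothetical entry (not part of `simple.yml`) that pins one
-dependency and constrains the version range of another could look like this:
-
-    base:
-      available:
-        - base 0.2.0; depends dep == 0.1.0, simple >= 0.1.0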
-
-Next, in our yaml file, we have the `cases:` section which is a list of
-test cases.  Each test case has a request and a response.  The request
-is what the user would want to do:
-
-    cases:
-    -
-      request:
-        - install: simple
-        - uninstall: simple
-      response:
-        - state:
-          - simple 0.2.0
-        - state: null
-
-Here the first request is to install the package `simple`; this is
-basically equivalent to typing `pip install simple`. The corresponding
-first response is that the state of installed packages is `simple 0.2.0`.
-Note that by default the highest version of an available package will be
-installed.
-
-The second request is to uninstall simple again, which will result in the
-state `null` (basically an empty list of installed packages).
-
-When the yaml tests are run, each response is verified by checking which
-packages actually got installed.  Note that this check is done in
-alphabetical order.
-
-
-
-The linter is very useful for initially checking `.yml` files, e.g.:
-
-    $ python linter.py -v simple.yml
-
-To run only the yaml tests, use (from the root of the source tree):
-
-    $ tox -e py38 -- -m yaml -vv
-
-Or, in order to avoid collecting all the test cases:
-
-    $ tox -e py38 -- tests/functional/test_yaml.py
-
-Or, only a specific test:
-
-    $ tox -e py38 -- tests/functional/test_yaml.py -k simple
-
-Or, just a specific test case:
-
-    $ tox -e py38 -- tests/functional/test_yaml.py -k simple-0
-
-
-
diff --git a/tests/yaml/backtrack.yml b/tests/yaml/backtrack.yml
deleted file mode 100644
index ffcb722b8..000000000
--- a/tests/yaml/backtrack.yml
+++ /dev/null
@@ -1,40 +0,0 @@
-# Pradyun's backtracking example
-base:
-  available:
-    - A 1.0.0; depends B == 1.0.0
-    - A 2.0.0; depends B == 2.0.0, C == 1.0.0
-    - A 3.0.0; depends B == 3.0.0, C == 2.0.0
-    - A 4.0.0; depends B == 4.0.0, C == 3.0.0
-    - A 5.0.0; depends B == 5.0.0, C == 4.0.0
-    - A 6.0.0; depends B == 6.0.0, C == 5.0.0
-    - A 7.0.0; depends B == 7.0.0, C == 6.0.0
-    - A 8.0.0; depends B == 8.0.0, C == 7.0.0
-
-    - B 1.0.0; depends C == 1.0.0
-    - B 2.0.0; depends C == 2.0.0
-    - B 3.0.0; depends C == 3.0.0
-    - B 4.0.0; depends C == 4.0.0
-    - B 5.0.0; depends C == 5.0.0
-    - B 6.0.0; depends C == 6.0.0
-    - B 7.0.0; depends C == 7.0.0
-    - B 8.0.0; depends C == 8.0.0
-
-    - C 1.0.0
-    - C 2.0.0
-    - C 3.0.0
-    - C 4.0.0
-    - C 5.0.0
-    - C 6.0.0
-    - C 7.0.0
-    - C 8.0.0
-
-cases:
--
-  request:
-    - install: A
-  response:
-    - state:
-      - A 1.0.0
-      - B 1.0.0
-      - C 1.0.0
-  skip: legacy
diff --git a/tests/yaml/circular.yml b/tests/yaml/circular.yml
deleted file mode 100644
index 95c535454..000000000
--- a/tests/yaml/circular.yml
+++ /dev/null
@@ -1,45 +0,0 @@
-base:
-  available:
-    - A 1.0.0; depends B == 1.0.0
-    - B 1.0.0; depends C == 1.0.0
-    - C 1.0.0; depends D == 1.0.0
-    - D 1.0.0; depends A == 1.0.0
-
-cases:
-# NOTE: Do we want to check the order?
--
-  request:
-    - install: A
-  response:
-    - state:
-      - A 1.0.0
-      - B 1.0.0
-      - C 1.0.0
-      - D 1.0.0
--
-  request:
-    - install: B
-  response:
-    - state:
-      - A 1.0.0
-      - B 1.0.0
-      - C 1.0.0
-      - D 1.0.0
--
-  request:
-    - install: C
-  response:
-    - state:
-      - A 1.0.0
-      - B 1.0.0
-      - C 1.0.0
-      - D 1.0.0
--
-  request:
-    - install: D
-  response:
-    - state:
-      - A 1.0.0
-      - B 1.0.0
-      - C 1.0.0
-      - D 1.0.0
diff --git a/tests/yaml/conflict_1.yml b/tests/yaml/conflict_1.yml
deleted file mode 100644
index dc18be32a..000000000
--- a/tests/yaml/conflict_1.yml
+++ /dev/null
@@ -1,77 +0,0 @@
-base:
-  available:
-    - A 1.0.0; depends B == 1.0.0, B == 2.0.0
-    - B 1.0.0
-    - B 2.0.0
-
-cases:
--
-  request:
-    - install: A
-  response:
-    - error:
-        code: 0
-        stderr: ['incompatible']
-  skip: legacy
-  # -- a good error message would be:
-  # A 1.0.0 has incompatible requirements B==1.0.0, B==2.0.0
-
--
-  request:
-    - install: ['B==1.0.0', 'B']
-  response:
-    - state:
-       - B 1.0.0
-  skip: legacy
-  # -- old error:
-  # Double requirement given: B (already in B==1.0.0, name='B')
-
--
-  request:
-    - install: ['B==1.0.0', 'B==2.0.0']
-  response:
-    - state: null
-      error:
-        code: 1
-        stderr: >-
-          Cannot install B==1.0.0 and B==2.0.0 because these
-          package versions have conflicting dependencies.
-  skip: legacy
-  # -- currently the (new resolver) error message is:
-  # Could not find a version that satisfies the requirement B==1.0.0
-  # Could not find a version that satisfies the requirement B==2.0.0
-  # No matching distribution found for b, b
-  # -- better would be:
-  # cannot install different version (1.0.0, 2.0.0) of package B at the
-  # same time.
-  # -- the old error message was actually better here:
-  # Double requirement given: B==2.0.0 (already in B==1.0.0, name='B')
-
--
-  request:
-    - install: B==1.5.0
-  response:
-    - state: null
-      error:
-        code: 1
-        stderr: 'no\s+matching\s+distribution'
-  skip: legacy
-  # -- currently (new resolver) error message is:
-  # Could not find a version that satisfies the requirement B==1.5.0
-  # No matching distribution found for b
-  # -- the old error message was actually better here:
-  # Could not find a version that satisfies the requirement B==1.5.0 (from versions: 1.0.0, 2.0.0)
-  # No matching distribution found for B==1.5.0
-
--
-  request:
-    - install: A==2.0
-  response:
-    - state: null
-      error:
-        code: 1
-        stderr: 'no\s+matching\s+distribution'
-  skip: legacy
-  # -- currently the error message is:
-  # Could not find a version that satisfies the requirement A==2.0
-  # No matching distribution found for a
diff --git a/tests/yaml/conflict_2.yml b/tests/yaml/conflict_2.yml
deleted file mode 100644
index 7ec5848ed..000000000
--- a/tests/yaml/conflict_2.yml
+++ /dev/null
@@ -1,28 +0,0 @@
-# Tzu-ping mentioned this example
-base:
-  available:
-    - name: virtualenv
-      version: 20.0.2
-      depends: ['six>=1.12.0,<2']
-    - six 1.11
-    - six 1.12
-    - six 1.13
-
-cases:
--
-  request:
-    - install: virtualenv
-  response:
-    - state:
-      - six 1.13
-      - virtualenv 20.0.2
--
-  request:
-    - install: ['six<1.12', 'virtualenv==20.0.2']
-  response:
-    - state: null
-      error:
-        stderr: >-
-          Cannot install six<1.12 and virtualenv 20.0.2 because these
-          package versions have conflicting dependencies.
-  skip: legacy
diff --git a/tests/yaml/conflict_3.yml b/tests/yaml/conflict_3.yml
deleted file mode 100644
index 53f2b4a98..000000000
--- a/tests/yaml/conflict_3.yml
+++ /dev/null
@@ -1,22 +0,0 @@
-base:
-  available:
-    - A 1.0.0; depends B == 1.0.0, C == 2.0.0
-    - B 1.0.0; depends C == 1.0.0
-    - C 1.0.0
-    - C 2.0.0
-
-cases:
--
-  request:
-    - install: A
-  response:
-    - state: null
-  skip: legacy
-  # -- currently the error message is:
-  # Could not find a version that satisfies the requirement C==2.0.0 (from a)
-  # Could not find a version that satisfies the requirement C==1.0.0 (from b)
-  # No matching distribution found for c, c
-  # -- This is a bit confusing, as both versions of C are available.
-  # -- better would be something like:
-  # A 1.0.0 -> B 1.0.0 -> C 1.0.0
-  # A 1.0.0 -> C 2.0.0
diff --git a/tests/yaml/conflicting_diamond.yml b/tests/yaml/conflicting_diamond.yml
deleted file mode 100644
index c28b667ac..000000000
--- a/tests/yaml/conflicting_diamond.yml
+++ /dev/null
@@ -1,19 +0,0 @@
-cases:
--
-  available:
-    - A 1.0.0; depends B == 1.0.0, C == 1.0.0
-    - B 1.0.0; depends D == 1.0.0
-    - C 1.0.0; depends D == 2.0.0
-    - D 1.0.0
-    - D 2.0.0
-  request:
-    - install: A
-  response:
-    - error:
-        code: 1
-        stderr: >-
-          Cannot install A and A because these package
-          versions have conflicting dependencies.
-        # TODO: Tweak this error message to make sense.
-        # https://github.com/pypa/pip/issues/8495
-  skip: legacy
diff --git a/tests/yaml/conflicting_triangle.yml b/tests/yaml/conflicting_triangle.yml
deleted file mode 100644
index 02b348ca2..000000000
--- a/tests/yaml/conflicting_triangle.yml
+++ /dev/null
@@ -1,18 +0,0 @@
-cases:
--
-  available:
-    - A 1.0.0; depends C == 1.0.0
-    - B 1.0.0; depends C == 2.0.0
-    - C 1.0.0
-    - C 2.0.0
-  request:
-    - install: A
-    - install: B
-  response:
-    - state:
-      - A 1.0.0
-      - C 1.0.0
-    - error:
-        code: 0
-        stderr: ['c==1\.0\.0', 'incompatible']
-  skip: legacy
diff --git a/tests/yaml/extras.yml b/tests/yaml/extras.yml
deleted file mode 100644
index b0f4e992c..000000000
--- a/tests/yaml/extras.yml
+++ /dev/null
@@ -1,49 +0,0 @@
-base:
-  available:
-    - A 1.0.0; depends B == 1.0.0, C == 1.0.0, D == 1.0.0
-    - B 1.0.0; depends D[extra_1] == 1.0.0
-    - C 1.0.0; depends D[extra_2] == 1.0.0
-    - name: D
-      version: 1.0.0
-      depends: []
-      extras:
-        extra_1: [E == 1.0.0]
-        extra_2: [F == 1.0.0]
-    - E 1.0.0
-    - F 1.0.0
-cases:
--
-  request:
-    - install: B
-  response:
-    - state:
-      - B 1.0.0
-      - D 1.0.0
-      - E 1.0.0
--
-  request:
-    - install: C
-  response:
-    - state:
-      - C 1.0.0
-      - D 1.0.0
-      - F 1.0.0
--
-  request:
-    - install: A
-  response:
-    - state:
-      - A 1.0.0
-      - B 1.0.0
-      - C 1.0.0
-      - D 1.0.0
-      - E 1.0.0
-      - F 1.0.0
-  skip: legacy
--
-  request:
-    - install: D[extra_1]
-      options: --no-deps
-  response:
-    - state:
-      - D 1.0.0
diff --git a/tests/yaml/fallback.yml b/tests/yaml/fallback.yml
deleted file mode 100644
index 86925398a..000000000
--- a/tests/yaml/fallback.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-base:
-  available:
-    - A 1.0.0; depends B == 1.0.0, C == 1.0.0
-    - A 0.8.0
-    - B 1.0.0; depends D == 1.0.0
-    - C 1.0.0; depends D == 2.0.0
-    - D 1.0.0
-    - D 2.0.0
-
-cases:
--
-  request:
-    - install: A
-  response:
-    - state:
-      - A 0.8.0
-  # the old resolver tries to install A 1.0.0 (which fails), but the new
-  # resolver realises that A 1.0.0 cannot be installed and falls back to
-  # installing the older version A 0.8.0 instead.
-  skip: legacy
diff --git a/tests/yaml/huge.yml b/tests/yaml/huge.yml
deleted file mode 100644
index 01bfdf26f..000000000
--- a/tests/yaml/huge.yml
+++ /dev/null
@@ -1,1260 +0,0 @@
-base:
-  available:
-    - alabaster 0.7.10
-    - alabaster 0.7.11
-    - appdirs 1.4.3
-    - asn1crypto 0.22.0
-    - asn1crypto 0.23.0
-    - asn1crypto 0.24.0
-    - name: astroid
-      version: 1.5.3
-      depends: ['lazy-object-proxy', 'setuptools', 'six', 'wrapt']
-    - name: astroid
-      version: 1.6.0
-      depends: ['lazy-object-proxy', 'setuptools', 'six', 'wrapt']
-    - name: astroid
-      version: 1.6.1
-      depends: ['lazy-object-proxy', 'setuptools', 'six', 'wrapt']
-    - name: astroid
-      version: 1.6.2
-      depends: ['lazy-object-proxy', 'setuptools', 'six', 'wrapt']
-    - name: astroid
-      version: 1.6.3
-      depends: ['lazy-object-proxy', 'setuptools', 'six', 'wrapt']
-    - name: astroid
-      version: 1.6.4
-      depends: ['lazy-object-proxy', 'setuptools', 'six', 'wrapt']
-    - name: astroid
-      version: 1.6.5
-      depends: ['lazy-object-proxy', 'setuptools', 'six', 'wrapt']
-    - name: astroid
-      version: 2.0.2
-      depends: ['lazy-object-proxy', 'six', 'wrapt']
-    - name: astroid
-      version: 2.0.4
-      depends: ['lazy-object-proxy', 'six', 'wrapt']
-    - name: attrs
-      version: 17.2.0
-      depends: ['hypothesis', 'pympler', 'zope', 'zope.interface']
-    - name: attrs
-      version: 17.3.0
-      depends: ['hypothesis', 'pympler', 'zope', 'zope.interface']
-    - attrs 17.4.0
-    - attrs 18.1.0
-    - name: automat
-      version: 0.6.0
-      depends: ['attrs', 'six']
-    - name: automat
-      version: 0.7.0
-      depends: ['attrs', 'six']
-    - name: babel
-      version: 2.5.0
-      depends: ['pytz']
-    - name: babel
-      version: 2.5.1
-      depends: ['pytz']
-    - name: babel
-      version: 2.5.3
-      depends: ['pytz']
-    - name: babel
-      version: 2.6.0
-      depends: ['pytz']
-    - backcall 0.1.0
-    - backports 1.0
-    - name: backports.functools_lru_cache
-      version: '1.4'
-      depends: ['backports', 'setuptools']
-    - name: backports.functools_lru_cache
-      version: '1.5'
-      depends: ['backports', 'setuptools']
-    - name: backports.shutil_get_terminal_size
-      version: 1.0.0
-      depends: ['backports']
-    - backports_abc 0.5
-    - beautifulsoup4 4.6.0
-    - beautifulsoup4 4.6.1
-    - beautifulsoup4 4.6.3
-    - bitarray 0.8.1
-    - bitarray 0.8.2
-    - bitarray 0.8.3
-    - name: bkcharts
-      version: '0.2'
-      depends: ['numpy >=1.7.1', 'pandas', 'six >=1.5.2']
-    - name: bleach
-      version: 2.0.0
-      depends: ['html5lib >=0.99999999', 'six']
-    - name: bleach
-      version: 2.1.1
-      depends: ['html5lib >=0.99999999', 'setuptools', 'six']
-    - name: bleach
-      version: 2.1.2
-      depends: ['html5lib >=0.99999999', 'setuptools', 'six']
-    - name: bleach
-      version: 2.1.3
-      depends: ['html5lib >=0.99999999', 'setuptools', 'six']
-    - name: bokeh
-      version: 0.12.10
-      depends: ['jinja2 >=2.7', 'numpy >=1.7.1', 'python-dateutil >=2.1', 'pyyaml >=3.10', 'six >=1.5.2', 'tornado >=4.3']
-    - name: bokeh
-      version: 0.12.11
-      depends: ['jinja2 >=2.7', 'numpy >=1.7.1', 'python-dateutil >=2.1', 'pyyaml >=3.10', 'six >=1.5.2', 'tornado >=4.3']
-    - name: bokeh
-      version: 0.12.13
-      depends: ['jinja2 >=2.7', 'numpy >=1.7.1', 'python-dateutil >=2.1', 'pyyaml >=3.10', 'six >=1.5.2', 'tornado >=4.3']
-    - name: bokeh
-      version: 0.12.14
-      depends: ['jinja2 >=2.7', 'numpy >=1.7.1', 'packaging >=16.8', 'python-dateutil >=2.1', 'pyyaml >=3.10', 'six >=1.5.2', 'tornado >=4.3']
-    - name: bokeh
-      version: 0.12.15
-      depends: ['jinja2 >=2.7', 'numpy >=1.7.1', 'packaging >=16.8', 'python-dateutil >=2.1', 'pyyaml >=3.10', 'six >=1.5.2', 'tornado >=4.3']
-    - name: bokeh
-      version: 0.12.16
-      depends: ['jinja2 >=2.7', 'numpy >=1.7.1', 'packaging >=16.8', 'python-dateutil >=2.1', 'pyyaml >=3.10', 'six >=1.5.2', 'tornado >=4.3']
-    - name: bokeh
-      version: 0.12.7
-      depends: ['bkcharts >=0.2', 'jinja2 >=2.7', 'matplotlib', 'numpy >=1.7.1', 'pandas', 'python-dateutil >=2.1', 'pyyaml >=3.10', 'requests >=1.2.3', 'six >=1.5.2', 'tornado >=4.3']
-    - name: bokeh
-      version: 0.12.9
-      depends: ['jinja2 >=2.7', 'numpy >=1.7.1', 'python-dateutil >=2.1', 'pyyaml >=3.10', 'six >=1.5.2', 'tornado >=4.3']
-    - name: bokeh
-      version: 0.13.0
-      depends: ['jinja2 >=2.7', 'numpy >=1.7.1', 'packaging >=16.8', 'python-dateutil >=2.1', 'pyyaml >=3.10', 'six >=1.5.2', 'tornado >=4.3']
-    - name: boto3
-      version: 1.4.7
-      depends: ['botocore >=1.7.0,<1.8.0', 'jmespath >=0.7.1,<1.0.0', 's3transfer >=0.1.10,<0.2.0']
-    - name: boto3
-      version: 1.4.8
-      depends: ['botocore >=1.8.0,<1.9.0', 'jmespath >=0.7.1,<1.0.0', 's3transfer >=0.1.10,<0.2.0']
-    - name: boto3
-      version: 1.5.32
-      depends: ['botocore >=1.8.46,<1.9.0', 'jmespath >=0.7.1,<1.0.0', 's3transfer >=0.1.10,<0.2.0']
-    - name: boto3
-      version: 1.6.18
-      depends: ['botocore >=1.9.18,<1.10.0', 'jmespath >=0.7.1,<1.0.0', 's3transfer >=0.1.10,<0.2.0']
-    - name: boto3
-      version: 1.7.24
-      depends: ['botocore >=1.10.24,<1.11.0', 'jmespath >=0.7.1,<1.0.0', 's3transfer >=0.1.10,<0.2.0']
-    - name: boto3
-      version: 1.7.32
-      depends: ['botocore >=1.10.32,<1.11.0', 'jmespath >=0.7.1,<1.0.0', 's3transfer >=0.1.10,<0.2.0']
-    - name: boto3
-      version: 1.7.4
-      depends: ['botocore >=1.10.4,<1.11.0', 'jmespath >=0.7.1,<1.0.0', 's3transfer >=0.1.10,<0.2.0']
-    - name: boto3
-      version: 1.7.45
-      depends: ['botocore >=1.10.45,<1.11.0', 'jmespath >=0.7.1,<1.0.0', 's3transfer >=0.1.10,<0.2.0']
-    - name: boto3
-      version: 1.7.62
-      depends: ['botocore >=1.10.62,<1.11.0', 'jmespath >=0.7.1,<1.0.0', 's3transfer >=0.1.10,<0.2.0']
-    - name: botocore
-      version: 1.10.12
-      depends: ['docutils >=0.10', 'jmespath >=0.7.1,<1.0.0', 'python-dateutil >=2.1,<3.0.0']
-    - name: botocore
-      version: 1.10.24
-      depends: ['docutils >=0.10', 'jmespath >=0.7.1,<1.0.0', 'python-dateutil >=2.1,<3.0.0']
-    - name: botocore
-      version: 1.10.32
-      depends: ['docutils >=0.10', 'jmespath >=0.7.1,<1.0.0', 'python-dateutil >=2.1,<3.0.0']
-    - name: botocore
-      version: 1.10.4
-      depends: ['docutils >=0.10', 'jmespath >=0.7.1,<1.0.0', 'python-dateutil >=2.1,<2.7.0']
-    - name: botocore
-      version: 1.10.45
-      depends: ['docutils >=0.10', 'jmespath >=0.7.1,<1.0.0', 'python-dateutil >=2.1,<3.0.0']
-    - name: botocore
-      version: 1.10.62
-      depends: ['docutils >=0.10', 'jmespath >=0.7.1,<1.0.0', 'python-dateutil >=2.1,<3.0.0']
-    - name: botocore
-      version: 1.5.78
-      depends: ['docutils >=0.10', 'jmespath >=0.7.1,<1.0.0', 'python-dateutil >=2.1,<3.0.0']
-    - name: botocore
-      version: 1.7.14
-      depends: ['docutils >=0.10', 'jmespath >=0.7.1,<1.0.0', 'python-dateutil >=2.1,<3.0.0']
-    - name: botocore
-      version: 1.7.20
-      depends: ['docutils >=0.10', 'jmespath >=0.7.1,<1.0.0', 'python-dateutil >=2.1,<3.0.0']
-    - name: botocore
-      version: 1.7.40
-      depends: ['docutils >=0.10', 'jmespath >=0.7.1,<1.0.0', 'python-dateutil >=2.1,<3.0.0']
-    - name: botocore
-      version: 1.7.5
-      depends: ['docutils >=0.10', 'jmespath >=0.7.1,<1.0.0', 'python-dateutil >=2.1,<3.0.0']
-    - name: botocore
-      version: 1.8.21
-      depends: ['docutils >=0.10', 'jmespath >=0.7.1,<1.0.0', 'python-dateutil >=2.1,<3.0.0']
-    - name: botocore
-      version: 1.8.46
-      depends: ['docutils >=0.10', 'jmespath >=0.7.1,<1.0.0', 'python-dateutil >=2.1,<3.0.0']
-    - name: botocore
-      version: 1.8.5
-      depends: ['docutils >=0.10', 'jmespath >=0.7.1,<1.0.0', 'python-dateutil >=2.1,<3.0.0']
-    - name: botocore
-      version: 1.9.18
-      depends: ['docutils >=0.10', 'jmespath >=0.7.1,<1.0.0', 'python-dateutil >=2.1,<2.7.0']
-    - certifi 2017.11.5
-    - certifi 2017.7.27.1
-    - certifi 2018.1.18
-    - certifi 2018.4.16
-    - certifi 2018.8.13
-    # cffi is a bundled module in PyPy and causes resolution errors if pip
-    # tries to install it. Give it a different name since we are simply
-    # checking the graph anyway and the identifier doesn't really matter.
-    - name: cffi_not_really
-      version: 1.10.0
-      depends: ['pycparser']
-    - name: cffi_not_really
-      version: 1.11.2
-      depends: ['pycparser']
-    - name: cffi_not_really
-      version: 1.11.4
-      depends: ['pycparser']
-    - name: cffi_not_really
-      version: 1.11.5
-      depends: ['pycparser']
-    - chardet 3.0.4
-    - click 6.7
-    - cloudpickle 0.4.0
-    - cloudpickle 0.4.2
-    - cloudpickle 0.5.2
-    - cloudpickle 0.5.3
-    - colorama 0.3.9
-    - configparser 3.5.0
-    - constantly 15.1.0
-    - contextlib2 0.5.5
-    - coverage 4.4.2
-    - coverage 4.5.1
-    - name: cryptography
-      version: 2.0.3
-      depends: ['asn1crypto >=0.21.0', 'cffi_not_really >=1.7', 'idna >=2.1', 'openssl 1.0.*', 'six >=1.4.1']
-    - name: cryptography
-      version: 2.1.3
-      depends: ['asn1crypto >=0.21.0', 'cffi_not_really >=1.7', 'idna >=2.1', 'openssl 1.0.*', 'openssl >=1.0.2m,<1.0.3a', 'six >=1.4.1']
-    - name: cryptography
-      version: 2.1.4
-      depends: ['asn1crypto >=0.21.0', 'cffi_not_really >=1.7', 'idna >=2.1', 'openssl 1.0.*', 'openssl >=1.0.2m,<1.0.3a', 'six >=1.4.1']
-    - name: cryptography
-      version: 2.2.1
-      depends: ['asn1crypto >=0.21.0', 'cffi_not_really >=1.7', 'idna >=2.1', 'openssl 1.0.*', 'openssl >=1.0.2n,<1.0.3a', 'six >=1.4.1']
-    - name: cryptography
-      version: 2.2.2
-      depends: ['asn1crypto >=0.21.0', 'cffi_not_really >=1.7', 'idna >=2.1', 'openssl 1.0.*', 'openssl >=1.0.2o,<1.0.3a', 'six >=1.4.1']
-    - name: cryptography
-      version: '2.3'
-      depends: ['asn1crypto >=0.21.0', 'cffi_not_really >=1.7', 'cryptography-vectors 2.3.*', 'idna >=2.1', 'openssl >=1.0.2o,<1.0.3a', 'six >=1.4.1']
-    - cryptography-vectors 2.0.3
-    - cryptography-vectors 2.1.3
-    - cryptography-vectors 2.1.4
-    - cryptography-vectors 2.2.1
-    - cryptography-vectors 2.2.2
-    - cryptography-vectors 2.3
-    - name: cycler
-      version: 0.10.0
-      depends: ['six']
-    - name: cytoolz
-      version: 0.8.2
-      depends: ['toolz >=0.8.0']
-    - name: cytoolz
-      version: 0.9.0
-      depends: ['toolz >=0.8.0']
-    - name: cytoolz
-      version: 0.9.0.1
-      depends: ['toolz >=0.8.0']
-    - name: dask
-      version: 0.15.2
-      depends: ['bokeh', 'cloudpickle >=0.2.1', 'dask-core 0.15.2.*', 'distributed >=1.16.0', 'numpy >=1.10', 'pandas >=0.19.0', 'partd >=0.3.8', 'toolz >=0.7.3']
-    - name: dask
-      version: 0.15.3
-      depends: ['bokeh', 'cloudpickle >=0.2.1', 'dask-core 0.15.3.*', 'distributed >=1.19.0', 'numpy >=1.10', 'pandas >=0.19.0', 'partd >=0.3.8', 'toolz >=0.7.3']
-    - name: dask
-      version: 0.15.4
-      depends: ['bokeh', 'cloudpickle >=0.2.1', 'dask-core 0.15.4.*', 'distributed >=1.19.0', 'numpy >=1.10', 'pandas >=0.19.0', 'partd >=0.3.8', 'toolz >=0.7.3']
-    - name: dask
-      version: 0.16.0
-      depends: ['bokeh', 'cloudpickle >=0.2.1', 'dask-core 0.16.0.*', 'distributed >=1.20.0', 'numpy >=1.10', 'pandas >=0.19.0', 'partd >=0.3.8', 'toolz >=0.7.3']
-    - name: dask
-      version: 0.16.1
-      depends: ['bokeh', 'cloudpickle >=0.2.1', 'dask-core 0.16.1.*', 'distributed >=1.20.0', 'numpy >=1.10', 'pandas >=0.19.0', 'partd >=0.3.8', 'toolz >=0.7.3']
-    - name: dask
-      version: 0.17.0
-      depends: ['bokeh', 'cloudpickle >=0.2.1', 'dask-core 0.17.0.*', 'distributed >=1.21.0', 'numpy >=1.10', 'pandas >=0.19.0', 'partd >=0.3.8', 'toolz >=0.7.3']
-    - name: dask
-      version: 0.17.1
-      depends: ['bokeh', 'cloudpickle >=0.2.1', 'dask-core 0.17.1.*', 'distributed >=1.21.1', 'numpy >=1.10', 'pandas >=0.19.0', 'partd >=0.3.8', 'toolz >=0.7.3']
-    - name: dask
-      version: 0.17.2
-      depends: ['bokeh', 'cloudpickle >=0.2.1', 'cytoolz >=0.7.3', 'dask-core 0.17.2.*', 'distributed >=1.21.0', 'numpy >=1.10.4', 'pandas >=0.19.0', 'partd >=0.3.8', 'toolz >=0.7.3']
-    - name: dask
-      version: 0.17.3
-      depends: ['bokeh', 'cloudpickle >=0.2.1', 'cytoolz >=0.7.3', 'dask-core 0.17.3.*', 'distributed >=1.21.0', 'numpy >=1.11.0', 'pandas >=0.19.0', 'partd >=0.3.8', 'toolz >=0.7.3']
-    - name: dask
-      version: 0.17.4
-      depends: ['bokeh', 'cloudpickle >=0.2.1', 'cytoolz >=0.7.3', 'dask-core 0.17.4.*', 'distributed >=1.21.0', 'numpy >=1.11.0', 'pandas >=0.19.0', 'partd >=0.3.8', 'toolz >=0.7.3']
-    - name: dask
-      version: 0.17.5
-      depends: ['bokeh', 'cloudpickle >=0.2.1', 'cytoolz >=0.7.3', 'dask-core 0.17.5.*', 'distributed >=1.21.0', 'numpy >=1.11.0', 'pandas >=0.19.0', 'partd >=0.3.8', 'toolz >=0.7.3']
-    - name: dask
-      version: 0.18.0
-      depends: ['bokeh', 'cloudpickle >=0.2.1', 'cytoolz >=0.7.3', 'dask-core 0.18.0.*', 'distributed >=1.22.0', 'numpy >=1.11.0', 'pandas >=0.19.0', 'partd >=0.3.8', 'toolz >=0.7.3']
-    - name: dask
-      version: 0.18.1
-      depends: ['bokeh', 'cloudpickle >=0.2.1', 'cytoolz >=0.7.3', 'dask-core 0.18.1.*', 'distributed >=1.22.0', 'numpy >=1.11.0', 'pandas >=0.19.0', 'partd >=0.3.8', 'toolz >=0.7.3']
-    - name: dask
-      version: 0.18.2
-      depends: ['bokeh', 'cloudpickle >=0.2.1', 'cytoolz >=0.7.3', 'dask-core 0.18.2.*', 'distributed >=1.22.0', 'numpy >=1.11.0', 'pandas >=0.19.0', 'partd >=0.3.8', 'toolz >=0.7.3']
-    - dask-core 0.15.2
-    - dask-core 0.15.3
-    - dask-core 0.15.4
-    - dask-core 0.16.0
-    - dask-core 0.16.1
-    - dask-core 0.17.0
-    - dask-core 0.17.1
-    - dask-core 0.17.2
-    - dask-core 0.17.3
-    - dask-core 0.17.4
-    - dask-core 0.17.5
-    - dask-core 0.18.0
-    - dask-core 0.18.1
-    - dask-core 0.18.2
-    - decorator 4.1.2
-    - decorator 4.2.1
-    - decorator 4.3.0
-    - dill 0.2.7.1
-    - dill 0.2.8.2
-    - name: distributed
-      version: 1.18.3
-      depends: ['click >=6.6', 'cloudpickle >=0.2.2', 'dask-core >=0.15.2', 'msgpack-python', 'psutil', 'six', 'sortedcontainers', 'tblib', 'toolz >=0.7.4', 'tornado >=4.5.1', 'zict >=0.1.2']
-    - name: distributed
-      version: 1.19.1
-      depends: ['click >=6.6', 'cloudpickle >=0.2.2', 'dask-core >=0.15.2', 'msgpack-python', 'psutil', 'six', 'sortedcontainers', 'tblib', 'toolz >=0.7.4', 'tornado >=4.5.1', 'zict >=0.1.3']
-    - name: distributed
-      version: 1.20.0
-      depends: ['click >=6.6', 'cloudpickle >=0.2.2', 'dask-core >=0.16.0', 'msgpack-python', 'psutil', 'pyyaml', 'six', 'sortedcontainers', 'tblib', 'toolz >=0.7.4', 'tornado >=4.5.1', 'zict >=0.1.3']
-    - name: distributed
-      version: 1.20.1
-      depends: ['click >=6.6', 'cloudpickle >=0.2.2', 'dask-core >=0.16.0', 'msgpack-python', 'psutil', 'pyyaml', 'six', 'sortedcontainers', 'tblib', 'toolz >=0.7.4', 'tornado >=4.5.1', 'zict >=0.1.3']
-    - name: distributed
-      version: 1.20.2
-      depends: ['click >=6.6', 'cloudpickle >=0.2.2', 'dask-core >=0.16.0', 'msgpack-python', 'psutil', 'pyyaml', 'six', 'sortedcontainers', 'tblib', 'toolz >=0.7.4', 'tornado >=4.5.1', 'zict >=0.1.3']
-    - name: distributed
-      version: 1.21.0
-      depends: ['click >=6.6', 'cloudpickle >=0.2.2', 'dask-core >=0.17.0', 'msgpack-python', 'psutil', 'pyyaml', 'six', 'sortedcontainers', 'tblib', 'toolz >=0.7.4', 'tornado >=4.5.1', 'zict >=0.1.3']
-    - name: distributed
-      version: 1.21.1
-      depends: ['click >=6.6', 'cloudpickle >=0.2.2', 'dask-core >=0.17.0', 'msgpack-python', 'psutil', 'pyyaml', 'six', 'sortedcontainers', 'tblib', 'toolz >=0.7.4', 'tornado >=4.5.1', 'zict >=0.1.3']
-    - name: distributed
-      version: 1.21.2
-      depends: ['click >=6.6', 'cloudpickle >=0.2.2', 'dask-core >=0.17.0', 'msgpack-python', 'psutil', 'pyyaml', 'six', 'sortedcontainers', 'tblib', 'toolz >=0.7.4', 'tornado >=4.5.1', 'zict >=0.1.3']
-    - name: distributed
-      version: 1.21.3
-      depends: ['click >=6.6', 'cloudpickle >=0.2.2', 'cytoolz >=0.7.4', 'dask-core >=0.17.0', 'msgpack-python', 'psutil', 'pyyaml', 'six', 'sortedcontainers', 'tblib', 'toolz >=0.7.4', 'tornado >=4.5.1', 'zict >=0.1.3']
-    - name: distributed
-      version: 1.21.4
-      depends: ['click >=6.6', 'cloudpickle >=0.2.2', 'cytoolz >=0.7.4', 'dask-core >=0.17.0', 'msgpack-python', 'psutil', 'pyyaml', 'six', 'sortedcontainers', 'tblib', 'toolz >=0.7.4', 'tornado >=4.5.1', 'zict >=0.1.3']
-    - name: distributed
-      version: 1.21.5
-      depends: ['click >=6.6', 'cloudpickle >=0.2.2', 'cytoolz >=0.7.4', 'dask-core >=0.17.0', 'msgpack-python', 'psutil', 'pyyaml', 'six', 'sortedcontainers', 'tblib', 'toolz >=0.7.4', 'tornado >=4.5.1', 'zict >=0.1.3']
-    - name: distributed
-      version: 1.21.6
-      depends: ['click >=6.6', 'cloudpickle >=0.2.2', 'cytoolz >=0.7.4', 'dask-core >=0.17.0', 'msgpack-python', 'psutil', 'pyyaml', 'six', 'sortedcontainers', 'tblib', 'toolz >=0.7.4', 'tornado >=4.5.1', 'zict >=0.1.3']
-    - name: distributed
-      version: 1.21.8
-      depends: ['click >=6.6', 'cloudpickle >=0.2.2', 'cytoolz >=0.7.4', 'dask-core >=0.17.0', 'msgpack-python', 'psutil', 'pyyaml', 'six', 'sortedcontainers', 'tblib', 'toolz >=0.7.4', 'tornado >=4.5.1', 'zict >=0.1.3']
-    - name: distributed
-      version: 1.22.0
-      depends: ['click >=6.6', 'cloudpickle >=0.2.2', 'cytoolz >=0.7.4', 'dask-core >=0.18.0', 'msgpack-python', 'psutil', 'pyyaml', 'six', 'sortedcontainers', 'tblib', 'toolz >=0.7.4', 'tornado >=4.5.1', 'zict >=0.1.3']
-    - name: distributed
-      version: 1.22.1
-      depends: ['click >=6.6', 'cloudpickle >=0.2.2', 'cytoolz >=0.7.4', 'dask-core >=0.18.0', 'msgpack-python', 'psutil', 'pyyaml', 'six', 'sortedcontainers', 'tblib', 'toolz >=0.7.4', 'tornado >=4.5.1', 'zict >=0.1.3']
-    - docutils 0.14
-    - entrypoints 0.2.3
-    - enum34 1.1.6
-    - expat 2.2.4
-    - expat 2.2.5
-    - filelock 2.0.12
-    - filelock 2.0.13
-    - filelock 3.0.4
-    - name: flask
-      version: 0.12.2
-      depends: ['click >=2.0', 'itsdangerous >=0.21', 'jinja2 >=2.4', 'werkzeug >=0.7']
-    - name: flask
-      version: 1.0.2
-      depends: ['click >=5.1', 'itsdangerous >=0.24', 'jinja2 >=2.10', 'werkzeug >=0.14']
-    - fribidi 1.0.2
-    - fribidi 1.0.4
-    - funcsigs 1.0.2
-    - functools32 3.2.3.2
-    - future 0.16.0
-    - futures 3.1.1
-    - futures 3.2.0
-    - name: gevent
-      version: 1.2.2
-      depends: ['cffi_not_really >=1.3.0', 'greenlet >=0.4.10']
-    - name: gevent
-      version: 1.3.0
-      depends: ['cffi_not_really >=1.11.5', 'greenlet >=0.4.10']
-    - name: gevent
-      version: 1.3.2.post0
-      depends: ['cffi_not_really >=1.11.5', 'greenlet >=0.4.13']
-    - name: gevent
-      version: 1.3.3
-      depends: ['cffi_not_really >=1.11.5', 'greenlet >=0.4.13']
-    - name: gevent
-      version: 1.3.4
-      depends: ['cffi_not_really >=1.11.5', 'greenlet >=0.4.13']
-    - name: gevent
-      version: 1.3.5
-      depends: ['cffi_not_really >=1.11.5', 'greenlet >=0.4.13']
-    - glob2 0.5
-    - glob2 0.6
-    - gmp 6.1.2
-    - graphite2 1.3.10
-    - graphite2 1.3.11
-    - greenlet 0.4.12
-    - greenlet 0.4.13
-    - greenlet 0.4.14
-    - name: html5lib
-      version: '0.999999999'
-      depends: ['six >=1.9', 'webencodings']
-    - name: html5lib
-      version: 1.0.1
-      depends: ['six >=1.9', 'webencodings']
-    - name: hyperlink
-      version: 18.0.0
-      depends: ['idna >=2.5']
-    - hypothesis 3.23.0
-    - name: hypothesis
-      version: 3.37.0
-      depends: ['attrs', 'coverage']
-    - name: hypothesis
-      version: 3.38.5
-      depends: ['attrs', 'coverage']
-    - name: hypothesis
-      version: 3.46.0
-      depends: ['attrs', 'coverage']
-    - name: hypothesis
-      version: 3.52.0
-      depends: ['attrs >=16.0.0', 'coverage']
-    - name: hypothesis
-      version: 3.53.0
-      depends: ['attrs >=16.0.0', 'coverage']
-    - name: hypothesis
-      version: 3.56.0
-      depends: ['attrs >=16.0.0', 'coverage']
-    - name: hypothesis
-      version: 3.57.0
-      depends: ['attrs >=16.0.0', 'coverage']
-    - name: hypothesis
-      version: 3.59.1
-      depends: ['attrs >=16.0.0', 'coverage']
-    - name: ibis-framework
-      version: 0.12.0
-      depends: ['impyla >=0.14.0', 'multipledispatch', 'numpy >=1.10.0', 'pandas >=0.18.1', 'psycopg2', 'python-graphviz', 'setuptools', 'six', 'sqlalchemy >=1.0.0', 'thrift', 'thriftpy <=0.3.9', 'toolz']
-    - name: ibis-framework
-      version: 0.13.0
-      depends: ['impyla >=0.14.0', 'multipledispatch', 'numpy >=1.10.0', 'pandas >=0.18.1', 'psycopg2', 'python-graphviz', 'setuptools', 'six', 'sqlalchemy >=1.0.0', 'thrift', 'thriftpy <=0.3.9', 'toolz']
-    - icu 58.2
-    - idna 2.6
-    - idna 2.7
-    - imagesize 0.7.1
-    - imagesize 1.0.0
-    - name: impyla
-      version: 0.14.0
-      depends: ['bitarray', 'setuptools', 'six', 'thriftpy >=0.3.5']
-    - name: impyla
-      version: 0.14.1
-      depends: ['bitarray', 'setuptools', 'six', 'thriftpy >=0.3.5']
-    - incremental 17.5.0
-    - ipaddress 1.0.18
-    - ipaddress 1.0.19
-    - ipaddress 1.0.22
-    - name: ipykernel
-      version: 4.6.1
-      depends: ['ipython', 'jupyter_client', 'tornado >=4.0', 'traitlets >=4.1']
-    - name: ipykernel
-      version: 4.7.0
-      depends: ['ipython', 'jupyter_client', 'tornado >=4.0', 'traitlets >=4.1']
-    - name: ipykernel
-      version: 4.8.0
-      depends: ['ipython >=4.0.0', 'jupyter_client', 'tornado >=4.0', 'traitlets >=4.1']
-    - name: ipykernel
-      version: 4.8.2
-      depends: ['ipython >=4.0.0', 'jupyter_client', 'tornado >=4.0', 'traitlets >=4.1']
-    - name: ipython
-      version: 5.4.1
-      depends: ['decorator', 'pexpect', 'pickleshare', 'prompt_toolkit >=1.0.4,<2.0.0', 'pygments', 'simplegeneric >0.8', 'traitlets']
-    - name: ipython
-      version: 5.5.0
-      depends: ['decorator', 'pexpect', 'pickleshare', 'prompt_toolkit >=1.0.4,<2.0.0', 'pygments', 'simplegeneric >0.8', 'traitlets']
-    - name: ipython
-      version: 5.6.0
-      depends: ['decorator', 'pexpect', 'pickleshare', 'prompt_toolkit >=1.0.4,<2.0.0', 'pygments', 'simplegeneric >0.8', 'traitlets']
-    - name: ipython
-      version: 5.7.0
-      depends: ['backports.shutil_get_terminal_size', 'decorator', 'pathlib2', 'pexpect', 'pickleshare', 'prompt_toolkit >=1.0.4,<2.0.0', 'pygments', 'simplegeneric >0.8', 'traitlets']
-    - name: ipython
-      version: 5.8.0
-      depends: ['decorator', 'pexpect', 'pickleshare', 'prompt_toolkit >=1.0.4,<2.0.0', 'pygments', 'simplegeneric >0.8', 'traitlets']
-    - name: ipython
-      version: 6.1.0
-      depends: ['decorator', 'jedi >=0.10', 'pexpect', 'pickleshare', 'prompt_toolkit >=1.0.4,<2.0.0', 'pygments', 'simplegeneric >0.8', 'traitlets']
-    - name: ipython
-      version: 6.2.1
-      depends: ['decorator', 'jedi >=0.10', 'pexpect', 'pickleshare', 'prompt_toolkit >=1.0.4,<2.0.0', 'pygments', 'simplegeneric >0.8', 'traitlets']
-    - name: ipython
-      version: 6.3.0
-      depends: ['backcall', 'decorator', 'jedi >=0.10', 'pexpect', 'pickleshare', 'prompt_toolkit >=1.0.4,<2.0.0', 'pygments', 'simplegeneric >0.8', 'traitlets >=4.2']
-    - name: ipython
-      version: 6.3.1
-      depends: ['backcall', 'decorator', 'jedi >=0.10', 'pexpect', 'pickleshare', 'prompt_toolkit >=1.0.4,<2.0.0', 'pygments', 'simplegeneric >0.8', 'traitlets >=4.2']
-    - name: ipython
-      version: 6.4.0
-      depends: ['backcall', 'decorator', 'jedi >=0.10', 'pexpect', 'pickleshare', 'prompt_toolkit >=1.0.4,<2.0.0', 'pygments', 'simplegeneric >0.8', 'traitlets >=4.2']
-    - name: ipython
-      version: 6.5.0
-      depends: ['backcall', 'decorator', 'jedi >=0.10', 'pexpect', 'pickleshare', 'prompt_toolkit >=1.0.4,<2.0.0', 'pygments', 'simplegeneric >0.8', 'traitlets >=4.2']
-    - name: ipython-notebook
-      version: 0.13.2
-      depends: ['ipython 0.13.2', 'pyzmq 2.2.0.1', 'tornado']
-    - name: ipython-notebook
-      version: 1.0.0
-      depends: ['ipython 1.0.0', 'pyzmq 2.2.0.1', 'tornado']
-    - name: ipython-notebook
-      version: 1.1.0
-      depends: ['ipython 1.1.0', 'jinja2', 'pyzmq 2.2.0.1', 'tornado']
-    - name: ipython-notebook
-      version: 2.0.0
-      depends: ['ipython 2.0.0', 'jinja2', 'pyzmq 14.*', 'tornado']
-    - name: ipython-notebook
-      version: 2.1.0
-      depends: ['ipython 2.1.0', 'jinja2', 'pyzmq 14.*', 'tornado']
-    - name: ipython-notebook
-      version: 2.2.0
-      depends: ['ipython 2.2.0', 'jinja2', 'pyzmq 14.*', 'tornado']
-    - name: ipython-notebook
-      version: 2.3.0
-      depends: ['ipython 2.3.0', 'jinja2', 'pyzmq 14.*', 'tornado']
-    - name: ipython-notebook
-      version: 2.3.1
-      depends: ['ipython 2.3.1', 'jinja2', 'pyzmq 14.*', 'tornado']
-    - name: ipython-notebook
-      version: 2.4.1
-      depends: ['ipython 2.4.1', 'jinja2', 'pyzmq 14.*', 'tornado']
-    - name: ipython-notebook
-      version: 3.0.0
-      depends: ['ipython 3.0.0', 'jinja2', 'jsonschema 2.4.0', 'mistune', 'pygments', 'pyzmq 14.*', 'terminado 0.5', 'tornado']
-    - name: ipython-notebook
-      version: 3.1.0
-      depends: ['ipython 3.1.0', 'jinja2', 'jsonschema 2.4.0', 'mistune', 'pygments', 'pyzmq 14.*', 'terminado 0.5', 'tornado']
-    - name: ipython-notebook
-      version: 3.2.0
-      depends: ['ipython 3.2.0', 'jinja2', 'jsonschema 2.4.0', 'mistune', 'pygments', 'pyzmq 14.*', 'terminado 0.5', 'tornado']
-    - name: ipython-notebook
-      version: 3.2.1
-      depends: ['ipython 3.2.1', 'jinja2', 'jsonschema 2.4.0', 'mistune', 'pygments', 'pyzmq 14.*', 'terminado 0.5', 'tornado']
-    - name: ipython-notebook
-      version: 4.0.4
-      depends: ['notebook']
-    - ipython_genutils 0.2.0
-    - name: ipywidgets
-      version: 7.0.0
-      depends: ['ipykernel >=4.5.1', 'ipython', 'nbformat >=4.2.0', 'traitlets >=4.3.1', 'widgetsnbextension >=3.0.0']
-    - name: ipywidgets
-      version: 7.0.5
-      depends: ['ipykernel >=4.5.1', 'ipython', 'nbformat >=4.2.0', 'traitlets >=4.3.1', 'widgetsnbextension >=3.0.0']
-    - name: ipywidgets
-      version: 7.1.0
-      depends: ['ipykernel >=4.5.1', 'ipython', 'nbformat >=4.2.0', 'traitlets >=4.3.1', 'widgetsnbextension >=3.0.0']
-    - name: ipywidgets
-      version: 7.1.1
-      depends: ['ipykernel >=4.5.1', 'ipython >=4.0.0', 'nbformat >=4.2.0', 'traitlets >=4.3.1,<5.0.0', 'widgetsnbextension >=3.1.0,<4.0']
-    - name: ipywidgets
-      version: 7.1.2
-      depends: ['ipykernel >=4.5.1', 'ipython >=4.0.0', 'nbformat >=4.2.0', 'traitlets >=4.3.1,<5.0.0', 'widgetsnbextension >=3.1.0,<4.0']
-    - name: ipywidgets
-      version: 7.2.0
-      depends: ['ipykernel >=4.5.1', 'ipython >=4.0.0', 'nbformat >=4.2.0', 'traitlets >=4.3.1,<5.0.0', 'widgetsnbextension >=3.2.0,<4.0.0']
-    - name: ipywidgets
-      version: 7.2.1
-      depends: ['ipykernel >=4.5.1', 'ipython >=4.0.0', 'nbformat >=4.2.0', 'traitlets >=4.3.1,<5.0.0', 'widgetsnbextension >=3.2.0,<4.0.0']
-    - name: ipywidgets
-      version: 7.3.0
-      depends: ['ipykernel >=4.5.1', 'ipython >=4.0.0', 'nbformat >=4.2.0', 'traitlets >=4.3.1,<5.0.0', 'widgetsnbextension >=3.3.0,<3.4.0']
-    - name: ipywidgets
-      version: 7.3.1
-      depends: ['ipykernel >=4.5.1', 'ipython >=4.0.0', 'nbformat >=4.2.0', 'traitlets >=4.3.1,<5.0.0', 'widgetsnbextension >=3.3.0,<3.4.0']
-    - name: ipywidgets
-      version: 7.4.0
-      depends: ['ipykernel >=4.5.1', 'ipython >=4.0.0', 'nbformat >=4.2.0', 'traitlets >=4.3.1,<5.0.0', 'widgetsnbextension >=3.4.0,<3.5.0']
-    - itsdangerous 0.24
-    - jedi 0.10.2
-    - name: jedi
-      version: 0.11.0
-      depends: ['parso ==0.1.0']
-    - name: jedi
-      version: 0.11.1
-      depends: ['numpydoc', 'parso >=0.1.0,<0.2']
-    - name: jedi
-      version: 0.12.0
-      depends: ['parso >=0.2.0']
-    - name: jedi
-      version: 0.12.1
-      depends: ['parso >=0.3.0']
-    - name: jinja2
-      version: '2.10'
-      depends: ['markupsafe >=0.23', 'setuptools']
-    - name: jinja2
-      version: 2.9.6
-      depends: ['markupsafe >=0.23', 'setuptools']
-    - jmespath 0.9.3
-    - jpeg 9b
-    - name: jsonschema
-      version: 2.6.0
-      depends: ['setuptools']
-    - name: jupyter
-      version: 1.0.0
-      depends: ['ipykernel', 'ipywidgets', 'jupyter_console', 'nbconvert', 'notebook', 'qtconsole']
-    - name: jupyter_client
-      version: 5.1.0
-      depends: ['jupyter_core', 'python-dateutil >=2.1', 'pyzmq >=13', 'traitlets']
-    - name: jupyter_client
-      version: 5.2.1
-      depends: ['jupyter_core', 'python-dateutil >=2.1', 'pyzmq >=13', 'traitlets']
-    - name: jupyter_client
-      version: 5.2.2
-      depends: ['jupyter_core', 'python-dateutil >=2.1', 'pyzmq >=13', 'tornado', 'traitlets']
-    - name: jupyter_client
-      version: 5.2.3
-      depends: ['jupyter_core', 'python-dateutil >=2.1', 'pyzmq >=13', 'tornado', 'traitlets']
-    - name: jupyter_console
-      version: 5.2.0
-      depends: ['ipykernel', 'ipython', 'jupyter_client', 'pexpect', 'prompt_toolkit', 'pygments']
-    - name: jupyter_core
-      version: 4.3.0
-      depends: ['traitlets']
-    - name: jupyter_core
-      version: 4.4.0
-      depends: ['traitlets']
-    - kiwisolver 1.0.0
-    - kiwisolver 1.0.1
-    - lazy-object-proxy 1.3.1
-    - llvmlite 0.20.0
-    - llvmlite 0.21.0
-    - llvmlite 0.22.0
-    - locket 0.2.0
-    - name: logilab-common
-      version: 1.4.1
-      depends: ['setuptools', 'six >=1.4.0']
-    - make 4.2.1
-    - markupsafe 1.0
-    - name: matplotlib
-      version: 2.0.2
-      depends: ['cycler >=0.10', 'numpy', 'pyparsing', 'pyqt 5.6.*', 'python-dateutil', 'pytz', 'setuptools', 'tornado']
-    - name: matplotlib
-      version: 2.1.0
-      depends: ['cycler >=0.10', 'numpy', 'pyparsing', 'pyqt 5.6.*', 'python-dateutil', 'pytz', 'setuptools', 'tornado']
-    - name: matplotlib
-      version: 2.1.1
-      depends: ['cycler >=0.10', 'numpy', 'pyparsing', 'pyqt 5.6.*', 'python-dateutil', 'pytz', 'setuptools', 'tornado']
-    - name: matplotlib
-      version: 2.1.2
-      depends: ['cycler >=0.10', 'numpy', 'pyparsing', 'pyqt 5.6.*', 'python-dateutil', 'pytz', 'setuptools', 'tornado']
-    - name: matplotlib
-      version: 2.2.0
-      depends: ['cycler >=0.10', 'numpy', 'pyparsing', 'pyqt 5.6.*', 'python-dateutil', 'pytz', 'setuptools', 'tornado']
-    - name: matplotlib
-      version: 2.2.2
-      depends: ['cycler >=0.10', 'numpy', 'pyparsing', 'pyqt >=5.6,<6.0a0', 'python-dateutil', 'pytz', 'setuptools', 'tornado']
-    - name: matplotlib
-      version: 2.2.3
-      depends: ['cycler >=0.10', 'numpy', 'pyparsing', 'pyqt 5.9.*', 'python-dateutil', 'pytz', 'setuptools', 'tornado']
-    - mistune 0.7.4
-    - mistune 0.8.1
-    - mistune 0.8.3
-    - msgpack-python 0.4.8
-    - msgpack-python 0.5.1
-    - msgpack-python 0.5.5
-    - msgpack-python 0.5.6
-    - multipledispatch 0.4.9
-    - multipledispatch 0.5.0
-    - name: multipledispatch
-      version: 0.6.0
-      depends: ['six']
-    - name: nbconvert
-      version: 5.3.1
-      depends: ['bleach', 'entrypoints >=0.2.2', 'jinja2', 'jupyter_client >=4.2', 'jupyter_core', 'mistune >0.6', 'nbformat', 'pandoc', 'pandocfilters >=1.4.1', 'pygments', 'testpath', 'traitlets']
-    - name: nbformat
-      version: 4.4.0
-      depends: ['ipython_genutils', 'jsonschema >=2.4,!=2.5.0', 'jupyter_core', 'traitlets >=4.1']
-    - ncurses 6.0
-    - ncurses 6.1
-    - name: nose
-      version: 1.3.7
-      depends: ['setuptools']
-    - name: notebook
-      version: 5.0.0
-      depends: ['ipykernel', 'ipython_genutils', 'jinja2', 'jupyter_client', 'jupyter_core', 'nbconvert', 'nbformat', 'terminado >=0.3.3', 'tornado >=4', 'traitlets >=4.3']
-    - name: notebook
-      version: 5.1.0
-      depends: ['ipykernel', 'ipython_genutils', 'jinja2', 'jupyter_client', 'jupyter_core', 'nbconvert', 'nbformat', 'terminado >=0.3.3', 'tornado >=4', 'traitlets >=4.3']
-    - name: notebook
-      version: 5.2.0
-      depends: ['ipykernel', 'ipython_genutils', 'jinja2', 'jupyter_client', 'jupyter_core', 'nbconvert', 'nbformat', 'terminado >=0.3.3', 'tornado >=4', 'traitlets >=4.3']
-    - name: notebook
-      version: 5.2.1
-      depends: ['ipykernel', 'ipython_genutils', 'jinja2', 'jupyter_client', 'jupyter_core', 'nbconvert', 'nbformat', 'terminado >=0.3.3', 'tornado >=4', 'traitlets >=4.3']
-    - name: notebook
-      version: 5.2.2
-      depends: ['ipykernel', 'ipython_genutils', 'jinja2', 'jupyter_client', 'jupyter_core', 'nbconvert', 'nbformat', 'terminado >=0.3.3', 'tornado >=4', 'traitlets >=4.3']
-    - name: notebook
-      version: 5.3.1
-      depends: ['ipykernel', 'ipython_genutils', 'jinja2', 'jupyter_client >=5.2.0', 'jupyter_core >=4.4.0', 'nbconvert', 'nbformat', 'send2trash', 'terminado >=0.8.1', 'tornado >=4', 'traitlets >=4.2.1']
-    - name: notebook
-      version: 5.4.0
-      depends: ['ipykernel', 'ipython_genutils', 'jinja2', 'jupyter_client >=5.2.0', 'jupyter_core >=4.4.0', 'nbconvert', 'nbformat', 'send2trash', 'terminado >=0.8.1', 'tornado >=4', 'traitlets >=4.2.1']
-    - name: notebook
-      version: 5.4.1
-      depends: ['ipykernel', 'ipython_genutils', 'jinja2', 'jupyter_client >=5.2.0', 'jupyter_core >=4.4.0', 'nbconvert', 'nbformat', 'send2trash', 'terminado >=0.8.1', 'tornado >=4', 'traitlets >=4.2.1']
-    - name: notebook
-      version: 5.5.0
-      depends: ['ipykernel', 'ipython_genutils', 'jinja2', 'jupyter_client >=5.2.0', 'jupyter_core >=4.4.0', 'nbconvert', 'nbformat', 'pyzmq >=17', 'send2trash', 'terminado >=0.8.1', 'tornado >=4', 'traitlets >=4.2.1']
-    - name: notebook
-      version: 5.6.0
-      depends: ['ipykernel', 'ipython_genutils', 'jinja2', 'jupyter_client >=5.2.0', 'jupyter_core >=4.4.0', 'nbconvert', 'nbformat', 'prometheus_client', 'pyzmq >=17', 'send2trash', 'terminado >=0.8.1', 'tornado >=4', 'traitlets >=4.2.1']
-    - numpy 1.11.3
-    - numpy 1.12.1
-    - numpy 1.13.1
-    - numpy 1.13.3
-    - numpy 1.14.0
-    - numpy 1.14.1
-    - numpy 1.14.2
-    - numpy 1.14.3
-    - numpy 1.14.4
-    - numpy 1.14.5
-    - numpy 1.15.0
-    - numpy 1.9.3
-    - name: numpydoc
-      version: 0.7.0
-      depends: ['sphinx']
-    - name: numpydoc
-      version: 0.8.0
-      depends: ['sphinx']
-    - name: openssl
-      version: 1.0.2l
-      depends: ['ca-certificates']
-    - name: openssl
-      version: 1.0.2m
-      depends: ['ca-certificates']
-    - name: openssl
-      version: 1.0.2n
-      depends: ['ca-certificates']
-    - name: openssl
-      version: 1.0.2o
-      depends: ['ca-certificates']
-    - name: openssl
-      version: 1.0.2p
-      depends: ['ca-certificates']
-    - name: packaging
-      version: '16.8'
-      depends: ['pyparsing', 'six']
-    - name: packaging
-      version: '17.1'
-      depends: ['pyparsing', 'six']
-    - name: pandas
-      version: 0.20.3
-      depends: ['numpy >=1.9', 'python-dateutil', 'pytz']
-    - name: pandas
-      version: 0.21.0
-      depends: ['numpy >=1.9.3,<2.0a0', 'python-dateutil', 'pytz']
-    - name: pandas
-      version: 0.21.1
-      depends: ['numpy >=1.9.3,<2.0a0', 'python-dateutil', 'pytz']
-    - name: pandas
-      version: 0.22.0
-      depends: ['numpy >=1.9.3,<2.0a0', 'python-dateutil', 'pytz']
-    - name: pandas
-      version: 0.23.0
-      depends: ['numpy >=1.9.3,<2.0a0', 'python-dateutil', 'pytz']
-    - name: pandas
-      version: 0.23.1
-      depends: ['numpy >=1.9.3,<2.0a0', 'python-dateutil >=2.5.*', 'pytz']
-    - name: pandas
-      version: 0.23.2
-      depends: ['numpy >=1.11.3,<2.0a0', 'python-dateutil >=2.5.*', 'pytz']
-    - name: pandas
-      version: 0.23.3
-      depends: ['numpy >=1.11.3,<2.0a0', 'python-dateutil >=2.5.*', 'pytz']
-    - name: pandas
-      version: 0.23.4
-      depends: ['numpy >=1.11.3,<2.0a0', 'python-dateutil >=2.5.*', 'pytz']
-    - pandocfilters 1.4.2
-    - parso 0.1.0
-    - parso 0.1.1
-    - parso 0.2.0
-    - parso 0.2.1
-    - parso 0.3.0
-    - parso 0.3.1
-    - name: partd
-      version: 0.3.8
-      depends: ['locket', 'toolz']
-    - patchelf 0.9
-    - path.py 10.3.1
-    - path.py 10.5
-    - path.py 11.0
-    - path.py 11.0.1
-    - name: pathlib2
-      version: 2.3.0
-      depends: ['six']
-    - name: pathlib2
-      version: 2.3.2
-      depends: ['six']
-    - pcre 8.41
-    - pcre 8.42
-    - perl 5.26.2
-    - name: perl-app-cpanminus
-      version: '1.7039'
-      depends: ['perl 5.22.0*']
-    - name: perl-encode-locale
-      version: '1.05'
-      depends: ['perl >=5.26.2,<5.27.0a0']
-    - name: pexpect
-      version: 4.2.1
-      depends: ['ptyprocess >=0.5']
-    - name: pexpect
-      version: 4.3.0
-      depends: ['ptyprocess >=0.5']
-    - name: pexpect
-      version: 4.3.1
-      depends: ['ptyprocess >=0.5']
-    - name: pexpect
-      version: 4.4.0
-      depends: ['ptyprocess >=0.5']
-    - name: pexpect
-      version: 4.5.0
-      depends: ['ptyprocess >=0.5']
-    - name: pexpect
-      version: 4.6.0
-      depends: ['ptyprocess >=0.5']
-    - pickleshare 0.7.4
-    - name: pip
-      version: 10.0.1
-      depends: ['setuptools', 'wheel']
-    - name: pip
-      version: 9.0.1
-      depends: ['setuptools', 'wheel']
-    - name: pip
-      version: 9.0.3
-      depends: ['setuptools', 'wheel']
-    - pixman 0.34.0
-    - pkginfo 1.4.1
-    - pkginfo 1.4.2
-    - ply 3.10
-    - ply 3.11
-    - name: prometheus_client
-      version: 0.2.0
-      depends: ['twisted']
-    - name: prometheus_client
-      version: 0.3.0
-      depends: ['twisted']
-    - name: prometheus_client
-      version: 0.3.1
-      depends: ['twisted']
-    - name: prompt_toolkit
-      version: 1.0.15
-      depends: ['pygments', 'six >=1.9.0', 'wcwidth']
-    - name: prompt_toolkit
-      version: 2.0.2
-      depends: ['pygments', 'six >=1.9.0', 'wcwidth']
-    - name: prompt_toolkit
-      version: 2.0.3
-      depends: ['pygments', 'six >=1.9.0', 'wcwidth']
-    - name: prompt_toolkit
-      version: 2.0.4
-      depends: ['pygments', 'six >=1.9.0', 'wcwidth']
-    - psutil 5.2.2
-    - psutil 5.3.1
-    - psutil 5.4.0
-    - psutil 5.4.1
-    - psutil 5.4.3
-    - psutil 5.4.5
-    - psutil 5.4.6
-    - psycopg2 2.7.3.1
-    - psycopg2 2.7.3.2
-    - psycopg2 2.7.4
-    - psycopg2 2.7.5
-    - ptyprocess 0.5.2
-    - ptyprocess 0.6.0
-    - pyasn1 0.3.7
-    - pyasn1 0.4.2
-    - pyasn1 0.4.3
-    - pyasn1 0.4.4
-    - name: pyasn1-modules
-      version: 0.2.1
-      depends: ['pyasn1 >=0.4.1,<0.5.0']
-    - name: pyasn1-modules
-      version: 0.2.2
-      depends: ['pyasn1 >=0.4.1,<0.5.0']
-    - pycosat 0.6.2
-    - pycosat 0.6.3
-    - pycparser 2.18
-    - name: pygments
-      version: 2.2.0
-      depends: ['setuptools']
-    - pympler 0.5
-    - name: pyopenssl
-      version: 17.2.0
-      depends: ['cryptography >=1.9', 'six >=1.5.2']
-    - name: pyopenssl
-      version: 17.4.0
-      depends: ['cryptography >=1.9', 'six >=1.5.2']
-    - name: pyopenssl
-      version: 17.5.0
-      depends: ['cryptography >=2.1.4', 'six >=1.5.2']
-    - name: pyopenssl
-      version: 18.0.0
-      depends: ['cryptography >=2.2.1', 'six >=1.5.2']
-    - pyparsing 2.2.0
-    - name: pyqt
-      version: 5.6.0
-      depends: ['qt 5.6.*', 'sip 4.18.*']
-    - name: pyqt
-      version: 5.9.2
-      depends: ['dbus >=1.13.2,<2.0a0', 'qt 5.9.*', 'qt >=5.9.6,<5.10.0a0', 'sip >=4.19.4']
-    - pysocks 1.6.7
-    - pysocks 1.6.8
-    - name: python-dateutil
-      version: 2.6.1
-      depends: ['six']
-    - name: python-dateutil
-      version: 2.7.0
-      depends: ['six >=1.5']
-    - name: python-dateutil
-      version: 2.7.2
-      depends: ['six >=1.5']
-    - name: python-dateutil
-      version: 2.7.3
-      depends: ['six >=1.5']
-    - name: python-digest
-      version: 1.1.1
-      depends: ['cryptography <2.2']
-    - python-graphviz 0.8.2
-    - python-graphviz 0.8.3
-    - python-graphviz 0.8.4
-    - pytz 2017.2
-    - pytz 2017.3
-    - pytz 2018.3
-    - pytz 2018.4
-    - pytz 2018.5
-    - pyyaml 3.12
-    - pyyaml 3.13
-    - pyzmq 16.0.2
-    - pyzmq 16.0.3
-    - pyzmq 17.0.0
-    - pyzmq 17.1.0
-    - pyzmq 17.1.2
-    - name: qtconsole
-      version: 4.3.1
-      depends: ['ipykernel >=4.1', 'jupyter_client >=4.1', 'jupyter_core', 'pygments', 'pyqt', 'traitlets']
-    - name: qtconsole
-      version: 4.4.0
-      depends: ['ipykernel >=4.1', 'jupyter_client >=4.1', 'jupyter_core', 'pygments', 'pyqt >=5.9.2,<5.10.0a0', 'traitlets']
-    - redis 4.0.10
-    - redis 4.0.2
-    - redis 4.0.8
-    - redis 4.0.9
-    - redis-py 2.10.6
-    - name: requests
-      version: 2.18.4
-      depends: ['certifi >=2017.4.17', 'chardet >=3.0.2,<3.1.0', 'idna >=2.5,<2.7', 'urllib3 >=1.21.1,<1.23']
-    - name: requests
-      version: 2.19.1
-      depends: ['certifi >=2017.4.17', 'chardet >=3.0.2,<3.1.0', 'idna >=2.5,<2.8', 'urllib3 >=1.21.1,<1.24']
-    - name: ruamel_yaml
-      version: 0.11.14
-      depends: ['yaml']
-    - name: ruamel_yaml
-      version: 0.15.35
-      depends: ['yaml', 'yaml >=0.1.7,<0.2.0a0']
-    - name: ruamel_yaml
-      version: 0.15.37
-      depends: ['yaml >=0.1.7,<0.2.0a0']
-    - name: ruamel_yaml
-      version: 0.15.40
-      depends: ['yaml >=0.1.7,<0.2.0a0']
-    - name: ruamel_yaml
-      version: 0.15.42
-      depends: ['yaml >=0.1.7,<0.2.0a0']
-    - name: ruamel_yaml
-      version: 0.15.46
-      depends: ['yaml >=0.1.7,<0.2.0a0']
-    - name: s3fs
-      version: 0.1.3
-      depends: ['boto3']
-    - name: s3fs
-      version: 0.1.4
-      depends: ['boto3']
-    - name: s3fs
-      version: 0.1.5
-      depends: ['boto3']
-    - name: s3transfer
-      version: 0.1.10
-      depends: ['botocore >=1.3.0,<2.0.0']
-    - name: s3transfer
-      version: 0.1.11
-      depends: ['botocore >=1.3.0,<2.0.0']
-    - name: s3transfer
-      version: 0.1.13
-      depends: ['botocore >=1.3.0,<2.0.0']
-    - scandir 1.5
-    - scandir 1.6
-    - scandir 1.7
-    - scandir 1.8
-    - scandir 1.9.0
-    - name: scipy
-      version: 0.19.1
-      depends: ['numpy >=1.9.3,<2.0a0']
-    - name: scipy
-      version: 1.0.0
-      depends: ['numpy >=1.9.3,<2.0a0']
-    - name: scipy
-      version: 1.0.1
-      depends: ['numpy >=1.9.3,<2.0a0']
-    - name: scipy
-      version: 1.1.0
-      depends: ['numpy >=1.11.3,<2.0a0']
-    - send2trash 1.4.2
-    - send2trash 1.5.0
-    - name: service_identity
-      version: 17.0.0
-      depends: ['attrs >=16.0.0', 'pyasn1', 'pyasn1-modules', 'pyopenssl >=0.12']
-    - name: setuptools
-      version: 36.5.0
-      depends: ['certifi']
-    - name: setuptools
-      version: 38.4.0
-      depends: ['certifi >=2016.09']
-    - name: setuptools
-      version: 38.5.1
-      depends: ['certifi >=2016.09']
-    - name: setuptools
-      version: 39.0.1
-      depends: ['certifi >=2016.09']
-    - name: setuptools
-      version: 39.1.0
-      depends: ['certifi >=2016.09']
-    - name: setuptools
-      version: 39.2.0
-      depends: ['certifi >=2016.09']
-    - name: setuptools
-      version: 40.0.0
-      depends: ['certifi >=2016.09']
-    - simplegeneric 0.8.1
-    - name: singledispatch
-      version: 3.4.0.3
-      depends: ['six']
-    - sip 4.18.1
-    - sip 4.19.8
-    - six 1.10.0
-    - six 1.11.0
-    - snowballstemmer 1.2.1
-    - name: sortedcollections
-      version: 0.5.3
-      depends: ['sortedcontainers']
-    - name: sortedcollections
-      version: 0.6.1
-      depends: ['sortedcontainers']
-    - name: sortedcollections
-      version: 1.0.1
-      depends: ['sortedcontainers >=2.0']
-    - sortedcontainers 1.5.10
-    - sortedcontainers 1.5.7
-    - sortedcontainers 1.5.9
-    - sortedcontainers 2.0.2
-    - sortedcontainers 2.0.3
-    - sortedcontainers 2.0.4
-    - name: sphinx
-      version: 1.6.3
-      depends: ['alabaster', 'babel', 'docutils', 'imagesize', 'jinja2', 'pygments', 'requests', 'six', 'snowballstemmer', 'sphinxcontrib-websupport', 'typing']
-    - name: sphinx
-      version: 1.6.6
-      depends: ['alabaster', 'babel', 'docutils', 'imagesize', 'jinja2', 'pygments', 'requests', 'six', 'snowballstemmer', 'sphinxcontrib-websupport', 'typing']
-    - name: sphinx
-      version: 1.7.0
-      depends: ['alabaster', 'babel', 'docutils', 'imagesize', 'jinja2', 'packaging', 'pygments', 'requests', 'six', 'snowballstemmer', 'sphinxcontrib-websupport', 'typing']
-    - name: sphinx
-      version: 1.7.1
-      depends: ['alabaster', 'babel', 'docutils', 'imagesize', 'jinja2', 'packaging', 'pygments', 'requests', 'six', 'snowballstemmer', 'sphinxcontrib-websupport', 'typing']
-    - name: sphinx
-      version: 1.7.2
-      depends: ['alabaster', 'babel', 'docutils', 'imagesize', 'jinja2', 'packaging', 'pygments', 'requests', 'six', 'snowballstemmer', 'sphinxcontrib-websupport', 'typing']
-    - name: sphinx
-      version: 1.7.3
-      depends: ['alabaster', 'babel', 'docutils', 'imagesize', 'jinja2', 'packaging', 'pygments', 'requests', 'six', 'snowballstemmer', 'sphinxcontrib-websupport', 'typing']
-    - name: sphinx
-      version: 1.7.4
-      depends: ['alabaster', 'babel', 'docutils', 'imagesize', 'jinja2', 'packaging', 'pygments', 'requests', 'six', 'snowballstemmer', 'sphinxcontrib-websupport', 'typing']
-    - name: sphinx
-      version: 1.7.5
-      depends: ['alabaster >=0.7,<0.8', 'babel >=1.3,!=2.0', 'docutils >=0.11', 'imagesize', 'jinja2 >=2.3', 'packaging', 'pygments >2.0', 'requests >2.0.0', 'six >=1.5', 'snowballstemmer >=1.1', 'sphinxcontrib-websupport']
-    - name: sphinx
-      version: 1.7.6
-      depends: ['alabaster >=0.7,<0.8', 'babel >=1.3,!=2.0', 'docutils >=0.11', 'imagesize', 'jinja2 >=2.3', 'packaging', 'pygments >2.0', 'requests >2.0.0', 'six >=1.5', 'snowballstemmer >=1.1', 'sphinxcontrib-websupport']
-    - sphinxcontrib 1.0
-    - name: sphinxcontrib-websupport
-      version: 1.0.1
-      depends: ['sphinxcontrib']
-    - name: sphinxcontrib-websupport
-      version: 1.1.0
-      depends: ['sphinxcontrib']
-    - sqlalchemy 1.1.13
-    - sqlalchemy 1.2.0
-    - sqlalchemy 1.2.1
-    - sqlalchemy 1.2.10
-    - sqlalchemy 1.2.3
-    - sqlalchemy 1.2.4
-    - sqlalchemy 1.2.5
-    - sqlalchemy 1.2.6
-    - sqlalchemy 1.2.7
-    - sqlalchemy 1.2.8
-    - name: ssl_match_hostname
-      version: 3.5.0.1
-      depends: ['backports']
-    - subprocess32 3.2.7
-    - subprocess32 3.5.0
-    - subprocess32 3.5.1
-    - subprocess32 3.5.2
-    - tblib 1.3.2
-    - name: terminado
-      version: '0.6'
-      depends: ['ptyprocess', 'tornado >=4']
-    - name: terminado
-      version: 0.8.1
-      depends: ['ptyprocess', 'tornado >=4']
-    - testpath 0.3.1
-    - name: thrift
-      version: 0.11.0
-      depends: ['six >=1.7.2']
-    - thrift 0.9.3
-    - name: thriftpy
-      version: 0.3.9
-      depends: ['ply >=3.4,<4.0']
-    - toolz 0.8.2
-    - toolz 0.9.0
-    - tornado 4.5.2
-    - tornado 4.5.3
-    - tornado 5.0
-    - tornado 5.0.1
-    - tornado 5.0.2
-    - tornado 5.1
-    - name: traitlets
-      version: 4.3.2
-      depends: ['decorator', 'ipython_genutils', 'six']
-    - name: twisted
-      version: 17.9.0
-      depends: ['appdirs >=1.4.0', 'automat >=0.3.0', 'constantly >=15.1', 'cryptography >=1.5', 'hyperlink >=17.1.1', 'idna >=0.6,!=2.3', 'incremental >=16.10.1', 'pyasn1', 'pyopenssl >=16.0.0', 'service_identity', 'zope.interface >=4.0.2']
-    - name: twisted
-      version: 18.4.0
-      depends: ['appdirs >=1.4.0', 'automat >=0.3.0', 'constantly >=15.1', 'cryptography >=1.5', 'hyperlink >=17.1.1', 'idna >=0.6,!=2.3', 'incremental >=16.10.1', 'pyasn1', 'pyopenssl >=16.0.0', 'service_identity', 'zope.interface >=4.0.2']
-    - name: twisted
-      version: 18.7.0
-      depends: ['appdirs >=1.4.0', 'automat >=0.3.0', 'constantly >=15.1', 'cryptography >=1.5', 'hyperlink >=17.1.1', 'idna >=0.6,!=2.3', 'incremental >=16.10.1', 'pyasn1', 'pyopenssl >=16.0.0', 'service_identity', 'zope.interface >=4.0.2']
-    - typed-ast 1.1.0
-    - typing 3.6.2
-    - typing 3.6.4
-    - ujson 1.35
-    - name: urllib3
-      version: '1.22'
-      depends: ['certifi', 'cryptography >=1.3.4', 'idna >=2.0.0', 'pyopenssl >=0.14', 'pysocks >=1.5.6,<2.0,!=1.5.7']
-    - name: urllib3
-      version: '1.23'
-      depends: ['certifi', 'cryptography >=1.3.4', 'idna >=2.0.0', 'pyopenssl >=0.14', 'pysocks >=1.5.6,<2.0,!=1.5.7']
-    - wcwidth 0.1.7
-    - webencodings 0.5.1
-    - werkzeug 0.12.2
-    - werkzeug 0.14.1
-    - name: wheel
-      version: 0.29.0
-      depends: ['setuptools']
-    - name: wheel
-      version: 0.30.0
-      depends: ['setuptools']
-    - name: wheel
-      version: 0.31.0
-      depends: ['setuptools']
-    - name: wheel
-      version: 0.31.1
-      depends: ['setuptools']
-    - name: widgetsnbextension
-      version: 3.0.2
-      depends: ['notebook >=4.4.1']
-    - name: widgetsnbextension
-      version: 3.0.8
-      depends: ['notebook >=4.4.1']
-    - name: widgetsnbextension
-      version: 3.1.0
-      depends: ['notebook >=4.4.1']
-    - name: widgetsnbextension
-      version: 3.1.4
-      depends: ['notebook >=4.4.1']
-    - name: widgetsnbextension
-      version: 3.2.0
-      depends: ['notebook >=4.4.1']
-    - name: widgetsnbextension
-      version: 3.2.1
-      depends: ['notebook >=4.4.1']
-    - name: widgetsnbextension
-      version: 3.3.0
-      depends: ['notebook >=4.4.1']
-    - name: widgetsnbextension
-      version: 3.3.1
-      depends: ['notebook >=4.4.1']
-    - name: widgetsnbextension
-      version: 3.4.0
-      depends: ['notebook >=4.4.1']
-    - wrapt 1.10.11
-    - xz 5.2.3
-    - xz 5.2.4
-    - yaml 0.1.7
-    - zeromq 4.2.2
-    - zeromq 4.2.3
-    - zeromq 4.2.5
-    - name: zict
-      version: 0.1.2
-      depends: ['heapdict']
-    - name: zict
-      version: 0.1.3
-      depends: ['heapdict']
-    - zope 1.0
-    - name: zope.interface
-      version: 4.4.3
-      depends: ['zope']
-    - name: zope.interface
-      version: 4.5.0
-      depends: ['zope']
-
-cases:
--
-  request:
-    - install: alabaster
-  response:
-    - state:
-      - alabaster 0.7.11
--
-  request:
-    - install: ipython==6.3.1
-  response:
-    - state:
-      - backcall 0.1.0
-      - decorator 4.3.0
-      - ipython 6.3.1
-      - ipython_genutils 0.2.0
-      - jedi 0.12.1
-      - parso 0.3.1
-      - pexpect 4.6.0
-      - pickleshare 0.7.4
-      - prompt_toolkit 1.0.15
-      - ptyprocess 0.6.0
-      - pygments 2.2.0
-      - simplegeneric 0.8.1
-      - six 1.11.0
-      - traitlets 4.3.2
-      - wcwidth 0.1.7
diff --git a/tests/yaml/large.yml b/tests/yaml/large.yml
deleted file mode 100644
index fbb1c737e..000000000
--- a/tests/yaml/large.yml
+++ /dev/null
@@ -1,295 +0,0 @@
-# The 129 available packages have been obtained by transforming a
-# conda repodata.json and applying some manual fixes.
-base:
-  available:
-    - affine 2.2.0
-    - affine 2.2.1
-    - asn1crypto 0.22.0
-    - asn1crypto 0.23.0
-    - asn1crypto 0.24.0
-    - backports 1.0
-    - name: backports.functools_lru_cache
-      version: '1.4'
-      depends: ['backports', 'setuptools']
-    - name: backports.functools_lru_cache
-      version: '1.5'
-      depends: ['backports', 'setuptools']
-    - beautifulsoup4 4.6.0
-    - beautifulsoup4 4.6.1
-    - beautifulsoup4 4.6.3
-    - name: cachecontrol
-      version: 0.12.3
-      depends: ['msgpack_python', 'requests']
-    - name: cachecontrol
-      version: 0.12.4
-      depends: ['msgpack_python', 'requests']
-    - name: cachecontrol
-      version: 0.12.5
-      depends: ['msgpack_python', 'requests']
-    - certifi 2017.11.5
-    - certifi 2017.7.27.1
-    - certifi 2018.1.18
-    - certifi 2018.4.16
-    - certifi 2018.8.13
-    # cffi is a bundled module in PyPy and causes resolution errors if pip
-    # tries to install it. Give it a different name since we are simply
-    # checking the graph anyway and the identifier doesn't really matter.
-    - name: cffi_not_really
-      version: 1.10.0
-      depends: ['pycparser']
-    - name: cffi_not_really
-      version: 1.11.2
-      depends: ['pycparser']
-    - name: cffi_not_really
-      version: 1.11.4
-      depends: ['pycparser']
-    - name: cffi_not_really
-      version: 1.11.5
-      depends: ['pycparser']
-    - chardet 3.0.4
-    - click 6.7
-    - colorama 0.3.9
-    - colour 0.1.4
-    - colour 0.1.5
-    - contextlib2 0.5.5
-    - name: cryptography
-      version: 2.0.3
-      depends: ['asn1crypto >=0.21.0', 'cffi_not_really >=1.7', 'idna >=2.1', 'six >=1.4.1']
-    - name: cryptography
-      version: 2.1.3
-      depends: ['asn1crypto >=0.21.0', 'cffi_not_really >=1.7', 'idna >=2.1', 'six >=1.4.1']
-    - name: cryptography
-      version: 2.1.4
-      depends: ['asn1crypto >=0.21.0', 'cffi_not_really >=1.7', 'idna >=2.1', 'six >=1.4.1']
-    - name: cryptography
-      version: 2.2.1
-      depends: ['asn1crypto >=0.21.0', 'cffi_not_really >=1.7', 'idna >=2.1', 'six >=1.4.1']
-    - name: cryptography
-      version: '2.3'
-      depends: ['asn1crypto >=0.21.0', 'cffi_not_really >=1.7', 'cryptography_vectors ~=2.3', 'idna >=2.1', 'six >=1.4.1']
-    - cryptography_vectors 2.0.3
-    - cryptography_vectors 2.1.3
-    - cryptography_vectors 2.1.4
-    - cryptography_vectors 2.2.1
-    - cryptography_vectors 2.2.2
-    - cryptography_vectors 2.3.0
-    - name: cytoolz
-      version: 0.8.2
-      depends: ['toolz >=0.8.0']
-    - name: cytoolz
-      version: 0.9.0
-      depends: ['toolz >=0.8.0']
-    - name: cytoolz
-      version: 0.9.0.1
-      depends: ['toolz >=0.8.0']
-    - distlib 0.2.5
-    - distlib 0.2.6
-    - distlib 0.2.7
-    - enum34 1.1.6
-    - filelock 2.0.12
-    - filelock 2.0.13
-    - filelock 3.0.4
-    - future 0.16.0
-    - futures 3.1.1
-    - futures 3.2.0
-    - glob2 0.5
-    - glob2 0.6
-    - name: html5lib
-      version: '0.999999999'
-      depends: ['six >=1.9', 'webencodings']
-    - name: html5lib
-      version: 1.0.1
-      depends: ['six >=1.9', 'webencodings']
-    - idna 2.6
-    - idna 2.7
-    - ipaddress 1.0.18
-    - ipaddress 1.0.19
-    - ipaddress 1.0.22
-    - name: jinja2
-      version: '2.10'
-      depends: ['markupsafe >=0.23', 'setuptools']
-    - name: jinja2
-      version: 2.9.6
-      depends: ['markupsafe >=0.23', 'setuptools']
-    - lockfile 0.12.2
-    - markupsafe 1.0
-    - msgpack_python 0.4.8
-    - msgpack_python 0.5.1
-    - msgpack_python 0.5.5
-    - msgpack_python 0.5.6
-    - name: packaging
-      version: '16.8'
-      depends: ['pyparsing', 'six']
-    - name: packaging
-      version: '17.1'
-      depends: ['pyparsing', 'six']
-    - name: pip
-      version: 10.0.1
-      depends: ['setuptools', 'wheel']
-    - name: pip
-      version: 9.0.1
-      depends: ['cachecontrol', 'colorama', 'distlib', 'html5lib', 'lockfile', 'packaging', 'progress', 'requests', 'setuptools', 'webencodings', 'wheel']
-    - name: pip
-      version: 9.0.3
-      depends: ['setuptools', 'wheel']
-    - pkginfo 1.4.1
-    - pkginfo 1.4.2
-    - progress 1.3
-    - progress 1.4
-    - psutil 5.2.2
-    - psutil 5.3.1
-    - psutil 5.4.0
-    - psutil 5.4.1
-    - psutil 5.4.3
-    - psutil 5.4.5
-    - psutil 5.4.6
-    - pycosat 0.6.2
-    - pycosat 0.6.3
-    - pycparser 2.18
-    - name: pyopenssl
-      version: 17.2.0
-      depends: ['cryptography >=1.9', 'six >=1.5.2']
-    - name: pyopenssl
-      version: 17.4.0
-      depends: ['cryptography >=1.9', 'six >=1.5.2']
-    - name: pyopenssl
-      version: 17.5.0
-      depends: ['cryptography >=2.1.4', 'six >=1.5.2']
-    - name: pyopenssl
-      version: 18.0.0
-      depends: ['cryptography >=2.2.1', 'six >=1.5.2']
-    - pyparsing 2.2.0
-    - name: pysocks
-      version: 1.6.7
-      depends: ['win_inet_pton']
-    - name: pysocks
-      version: 1.6.8
-      depends: ['win_inet_pton']
-    - pywin32 221
-    - pywin32 222
-    - pywin32 223
-    - pyyaml 3.12
-    - pyyaml 3.13
-    - name: requests
-      version: 2.18.4
-      depends: ['certifi >=2017.4.17', 'chardet >=3.0.2,<3.1.0', 'idna >=2.5,<2.7', 'urllib3 >=1.21.1,<1.23']
-    - name: requests
-      version: 2.19.1
-      depends: ['certifi >=2017.4.17', 'chardet >=3.0.2,<3.1.0', 'idna >=2.5,<2.8', 'urllib3 >=1.21.1,<1.24']
-    - scandir 1.5
-    - scandir 1.6
-    - scandir 1.7
-    - scandir 1.8
-    - scandir 1.9.0
-    - name: setuptools
-      version: 36.2.2
-      depends: ['certifi', 'wincertstore']
-    - name: setuptools
-      version: 36.5.0
-      depends: ['certifi', 'wincertstore']
-    - name: setuptools
-      version: 38.4.0
-      depends: ['certifi >=2016.09', 'wincertstore >=0.2']
-    - name: setuptools
-      version: 38.5.1
-      depends: ['certifi >=2016.09', 'wincertstore >=0.2']
-    - name: setuptools
-      version: 39.0.1
-      depends: ['certifi >=2016.09', 'wincertstore >=0.2']
-    - name: setuptools
-      version: 39.1.0
-      depends: ['certifi >=2016.09', 'wincertstore >=0.2']
-    - name: setuptools
-      version: 39.2.0
-      depends: ['certifi >=2016.09', 'wincertstore >=0.2']
-    - name: setuptools
-      version: 40.0.0
-      depends: ['certifi >=2016.09', 'wincertstore >=0.2']
-    - six 1.8.2
-    - six 1.10.0
-    - six 1.11.0
-    - toolz 0.8.2
-    - toolz 0.9.0
-    - name: urllib3
-      version: '1.22'
-      depends: ['certifi', 'cryptography >=1.3.4', 'idna >=2.0.0', 'pyopenssl >=0.14', 'pysocks >=1.5.6,<2.0,!=1.5.7']
-    - name: urllib3
-      version: '1.23'
-      depends: ['certifi', 'cryptography >=1.3.4', 'idna >=2.0.0', 'pyopenssl >=0.14', 'pysocks >=1.5.6,<2.0,!=1.5.7']
-    - webencodings 0.5.1
-    - name: wheel
-      version: 0.29.0
-      depends: ['setuptools']
-    - name: wheel
-      version: 0.30.0
-      depends: ['setuptools']
-    - name: wheel
-      version: 0.31.0
-      depends: ['setuptools']
-    - name: wheel
-      version: 0.31.1
-      depends: ['setuptools']
-    - win_inet_pton 1.0.1
-    - wincertstore 0.2
-
-cases:
--
-  request:
-    - install: affine
-  response:
-    - state:
-      - affine 2.2.1
--
-  request:
-    - install: cryptography
-  response:
-    - state:
-      - asn1crypto 0.24.0
-      - cffi_not_really 1.11.5
-      - cryptography 2.3
-      - cryptography_vectors 2.3.0
-      - idna 2.7
-      - pycparser 2.18
-      - six 1.11.0
-  skip: legacy
--
-  request:
-    - install: cachecontrol
-  response:
-    - state:
-      - asn1crypto 0.24.0
-      - cachecontrol 0.12.5
-      - certifi 2018.8.13
-      - cffi_not_really 1.11.5
-      - chardet 3.0.4
-      - cryptography 2.3
-      - cryptography_vectors 2.3.0
-      - idna 2.7
-      - msgpack_python 0.5.6
-      - pycparser 2.18
-      - pyopenssl 18.0.0
-      - pysocks 1.6.8
-      - requests 2.19.1
-      - six 1.11.0
-      - urllib3 1.23
-      - win_inet_pton 1.0.1
--
-  request:
-    - install: cytoolz
-  response:
-    - state:
-      - cytoolz 0.9.0.1
-      - toolz 0.9.0
--
-  request:
-    - install: ['html5lib', 'six ==1.8.2']
-  response:
-    - state: null
-      error:
-        code: 1
-        stderr: >-
-          Cannot install six==1.8.2, html5lib 1.0.1, six==1.8.2 and
-          html5lib 0.999999999 because these package versions have
-          conflicting dependencies.
-
-  skip: legacy
diff --git a/tests/yaml/linter.py b/tests/yaml/linter.py
deleted file mode 100644
index ac17bbc41..000000000
--- a/tests/yaml/linter.py
+++ /dev/null
@@ -1,108 +0,0 @@
-import re
-import sys
-from pprint import pprint
-
-import yaml
-
-sys.path.insert(0, '../../src')
-sys.path.insert(0, '../..')
-
-
-def check_dict(d, required=None, optional=None):
-    assert isinstance(d, dict)
-    if required is None:
-        required = []
-    if optional is None:
-        optional = []
-    for key in required:
-        if key not in d:
-            sys.exit("key %r is required" % key)
-    allowed_keys = set(required)
-    allowed_keys.update(optional)
-    for key in d.keys():
-        if key not in allowed_keys:
-            sys.exit("key %r is not allowed.  Allowed keys are: %r" %
-                     (key, allowed_keys))
-
-
-def lint_case(case, verbose=False):
-    from tests.functional.test_yaml import convert_to_dict
-
-    if verbose:
-        print("--- linting case ---")
-        pprint(case)
-
-    check_dict(case, optional=['available', 'request', 'response', 'skip'])
-    available = case.get("available", [])
-    requests = case.get("request", [])
-    responses = case.get("response", [])
-    assert isinstance(available, list)
-    assert isinstance(requests, list)
-    assert isinstance(responses, list)
-    assert len(requests) == len(responses)
-
-    for package in available:
-        if isinstance(package, str):
-            package = convert_to_dict(package)
-        if verbose:
-            pprint(package)
-        check_dict(package,
-                   required=['name', 'version'],
-                   optional=['depends', 'extras'])
-        version = package['version']
-        assert isinstance(version, str), repr(version)
-
-    for request, response in zip(requests, responses):
-        check_dict(request, optional=['install', 'uninstall', 'options'])
-        check_dict(response, optional=['state', 'error'])
-        assert len(response) >= 1
-        assert isinstance(response.get('state') or [], list)
-        error = response.get('error')
-        if error:
-            check_dict(error, optional=['code', 'stderr'])
-            stderr = error.get('stderr')
-            if stderr:
-                if isinstance(stderr, str):
-                    patterns = [stderr]
-                elif isinstance(stderr, list):
-                    patterns = stderr
-                else:
-                    raise TypeError("string or list expected, found %r" % stderr)
-                for pattern in patterns:
-                    re.compile(pattern, re.I)
-
-
-def lint_yml(yml_file, verbose=False):
-    if verbose:
-        print("=== linting: %s ===" % yml_file)
-    assert yml_file.endswith(".yml")
-    with open(yml_file) as fi:
-        data = yaml.safe_load(fi)
-    if verbose:
-        pprint(data)
-
-    check_dict(data, required=['cases'], optional=['base'])
-    base = data.get("base", {})
-    cases = data["cases"]
-    for _, case_template in enumerate(cases):
-        case = base.copy()
-        case.update(case_template)
-        lint_case(case, verbose)
-
-
-if __name__ == '__main__':
-    from optparse import OptionParser
-
-    p = OptionParser(usage="usage: %prog [options] FILE ...",
-                     description="linter for pip's yaml test FILE(s)")
-
-    p.add_option('-v', '--verbose',
-                 action="store_true")
-
-    opts, args = p.parse_args()
-
-    if len(args) < 1:
-        p.error('at least one argument required, try -h')
-
-    for yml_file in args:
-        lint_yml(yml_file, opts.verbose)
diff --git a/tests/yaml/non_pinned.yml b/tests/yaml/non_pinned.yml
deleted file mode 100644
index 6e9b26c4c..000000000
--- a/tests/yaml/non_pinned.yml
+++ /dev/null
@@ -1,24 +0,0 @@
-base:
-  available:
-    - A 1.0.0; depends B < 2.0.0
-    - A 2.0.0; depends B < 3.0.0
-    - B 1.0.0
-    - B 2.0.0
-    - B 2.1.0
-    - B 3.0.0
-
-cases:
--
-  request:
-    - install: A >= 2.0.0
-  response:
-    - state:
-      - A 2.0.0
-      - B 2.1.0
--
-  request:
-    - install: A < 2.0.0
-  response:
-    - state:
-      - A 1.0.0
-      - B 1.0.0
diff --git a/tests/yaml/overlap1.yml b/tests/yaml/overlap1.yml
deleted file mode 100644
index 9afbb04c3..000000000
--- a/tests/yaml/overlap1.yml
+++ /dev/null
@@ -1,44 +0,0 @@
-# https://medium.com/knerd/the-nine-circles-of-python-dependency-hell-481d53e3e025
-# Circle 4: Overlapping transitive dependencies
-base:
-  available:
-    - myapp 0.2.4; depends fussy, capridous
-    - name: fussy
-      version: 3.8.0
-      depends: ['requests >=1.2.0,<3']
-    - name: capridous
-      version: 1.1.0
-      depends: ['requests >=1.0.3,<2']
-    - requests 1.0.1
-    - requests 1.0.3
-    - requests 1.1.0
-    - requests 1.2.0
-    - requests 1.3.0
-    - requests 2.1.0
-    - requests 3.2.0
-
-cases:
--
-  request:
-    - install: myapp
-  response:
-    - state:
-      - capridous 1.1.0
-      - fussy 3.8.0
-      - myapp 0.2.4
-      - requests 1.3.0
-  skip: legacy
--
-  request:
-    - install: fussy
-  response:
-    - state:
-      - fussy 3.8.0
-      - requests 2.1.0
--
-  request:
-    - install: capridous
-  response:
-    - state:
-      - capridous 1.1.0
-      - requests 1.3.0
diff --git a/tests/yaml/pinned.yml b/tests/yaml/pinned.yml
deleted file mode 100644
index c8bd3f35d..000000000
--- a/tests/yaml/pinned.yml
+++ /dev/null
@@ -1,29 +0,0 @@
-base:
-  available:
-    - A 1.0.0
-    - A 2.0.0
-    - B 1.0.0; depends A == 1.0.0
-    - B 2.0.0; depends A == 2.0.0
-
-cases:
--
-  request:
-    - install: B
-  response:
-    - state:
-      - A 2.0.0
-      - B 2.0.0
--
-  request:
-    - install: B == 2.0.0
-  response:
-    - state:
-      - A 2.0.0
-      - B 2.0.0
--
-  request:
-    - install: B == 1.0.0
-  response:
-    - state:
-      - A 1.0.0
-      - B 1.0.0
diff --git a/tests/yaml/pip988.yml b/tests/yaml/pip988.yml
deleted file mode 100644
index 1190d2a4e..000000000
--- a/tests/yaml/pip988.yml
+++ /dev/null
@@ -1,37 +0,0 @@
-# https://github.com/pypa/pip/issues/988#issuecomment-606967707
-base:
-  available:
-    - A 1.0.0; depends B >= 1.0.0, C >= 1.0.0
-    - A 2.0.0; depends B >= 2.0.0, C >= 1.0.0
-    - B 1.0.0; depends C >= 1.0.0
-    - B 2.0.0; depends C >= 2.0.0
-    - C 1.0.0
-    - C 2.0.0
-
-cases:
--
-  request:
-    - install: C==1.0.0
-    - install: B==1.0.0
-    - install: A==1.0.0
-    - install: A==2.0.0
-  response:
-    - state:
-      - C 1.0.0
-    - state:
-      - B 1.0.0
-      - C 1.0.0
-    - state:
-      - A 1.0.0
-      - B 1.0.0
-      - C 1.0.0
-    - state:
-      - A 2.0.0
-      - B 2.0.0
-      - C 2.0.0
-  # for the last install (A==2.0.0) the old resolver gives
-  #   - A 2.0.0
-  #   - B 2.0.0
-  #   - C 1.0.0
-  # but because B 2.0.0 depends on C >=2.0.0 this is wrong
-  skip: legacy
diff --git a/tests/yaml/poetry2298.yml b/tests/yaml/poetry2298.yml
deleted file mode 100644
index 8b0670896..000000000
--- a/tests/yaml/poetry2298.yml
+++ /dev/null
@@ -1,24 +0,0 @@
-# see: https://github.com/python-poetry/poetry/issues/2298
-base:
-  available:
-    - poetry 1.0.5; depends zappa == 0.51.0, sphinx == 3.0.1
-    - zappa 0.51.0; depends boto3
-    - sphinx 3.0.1; depends docutils
-    - boto3 1.4.5; depends botocore ~=1.5.0
-    - botocore 1.5.92; depends docutils <0.16
-    - docutils 0.16.0
-    - docutils 0.15.0
-
-cases:
--
-  request:
-    - install: poetry
-  response:
-    - state:
-      - boto3 1.4.5
-      - botocore 1.5.92
-      - docutils 0.15.0
-      - poetry 1.0.5
-      - sphinx 3.0.1
-      - zappa 0.51.0
-  skip: legacy
diff --git a/tests/yaml/simple.yml b/tests/yaml/simple.yml
deleted file mode 100644
index 8e90e605d..000000000
--- a/tests/yaml/simple.yml
+++ /dev/null
@@ -1,47 +0,0 @@
-base:
-  available:
-    - simple 0.1.0
-    - simple 0.2.0
-    - base 0.1.0; depends dep
-    - dep 0.1.0
-
-cases:
--
-  request:
-    - install: simple
-    - uninstall: simple
-  response:
-    - state:
-      - simple 0.2.0
-    - state: null
--
-  request:
-    - install: simple
-    - install: dep
-  response:
-    - state:
-      - simple 0.2.0
-    - state:
-      - dep 0.1.0
-      - simple 0.2.0
--
-  request:
-    - install: base
-  response:
-    - state:
-      - base 0.1.0
-      - dep 0.1.0
--
-  request:
-    - install: base
-      options: --no-deps
-  response:
-    - state:
-      - base 0.1.0
--
-  request:
-    - install: ['dep', 'simple==0.1.0']
-  response:
-    - state:
-      - dep 0.1.0
-      - simple 0.1.0
diff --git a/tests/yaml/trivial.yml b/tests/yaml/trivial.yml
deleted file mode 100644
index 418422044..000000000
--- a/tests/yaml/trivial.yml
+++ /dev/null
@@ -1,24 +0,0 @@
-base:
-  available:
-    - a 0.1.0
-    - b 0.2.0
-    - c 0.3.0
-
-cases:
--
-  request:
-    - install: ['a', 'b']
-    - install: c
-    - uninstall: ['b', 'c']
-    - uninstall: a
-  response:
-    - state:
-      - a 0.1.0
-      - b 0.2.0
-    - state:
-      - a 0.1.0
-      - b 0.2.0
-      - c 0.3.0
-    - state:
-      - a 0.1.0
-    - state: null
diff --git a/.azure-pipelines/scripts/New-RAMDisk.ps1 b/tools/ci/New-RAMDisk.ps1
similarity index 100%
rename from .azure-pipelines/scripts/New-RAMDisk.ps1
rename to tools/ci/New-RAMDisk.ps1
diff --git a/tools/automation/news/template.rst b/tools/news/template.rst
similarity index 100%
rename from tools/automation/news/template.rst
rename to tools/news/template.rst
diff --git a/tools/automation/release/__init__.py b/tools/release/__init__.py
similarity index 98%
rename from tools/automation/release/__init__.py
rename to tools/release/__init__.py
index 768bbcec6..ec3a0eeb7 100644
--- a/tools/automation/release/__init__.py
+++ b/tools/release/__init__.py
@@ -28,7 +28,7 @@ def get_version_from_arguments(session: Session) -> Optional[str]:
     cmd = [
         # https://github.com/theacodes/nox/pull/378
         os.path.join(session.bin, "python"),  # type: ignore
-        "tools/automation/release/check_version.py",
+        "tools/release/check_version.py",
         version
     ]
     not_ok = subprocess.run(cmd).returncode
@@ -91,7 +91,7 @@ def generate_news(session: Session, version: str) -> None:
 
 
 def update_version_file(version: str, filepath: str) -> None:
-    with open(filepath, "r", encoding="utf-8") as f:
+    with open(filepath, encoding="utf-8") as f:
         content = list(f)
 
     file_modified = False
diff --git a/tools/automation/release/check_version.py b/tools/release/check_version.py
similarity index 100%
rename from tools/automation/release/check_version.py
rename to tools/release/check_version.py
diff --git a/tools/requirements/docs.txt b/tools/requirements/docs.txt
index a5aae67c1..aed18b508 100644
--- a/tools/requirements/docs.txt
+++ b/tools/requirements/docs.txt
@@ -1,5 +1,7 @@
 sphinx == 3.2.1
 furo
+myst_parser
+sphinx-copybutton
 sphinx-inline-tabs
 sphinxcontrib-towncrier
 
diff --git a/tools/requirements/tests.txt b/tools/requirements/tests.txt
index 9b4e98490..7badf2a27 100644
--- a/tools/requirements/tests.txt
+++ b/tools/requirements/tests.txt
@@ -1,16 +1,12 @@
---use-feature=2020-resolver
-cryptography==2.8
+cryptography
 freezegun
-mock
 pretend
 pytest
 pytest-cov
 pytest-rerunfailures
-pytest-timeout
 pytest-xdist
-pyyaml
 scripttest
-setuptools>=39.2.0  # Needed for `setuptools.wheel.Wheel` support.
-https://github.com/pypa/virtualenv/archive/legacy.zip#egg=virtualenv
-werkzeug==0.16.0
+setuptools
+virtualenv < 20.0
+werkzeug
 wheel
diff --git a/tools/tox_pip.py b/tools/tox_pip.py
index 5996dade6..fe7621342 100644
--- a/tools/tox_pip.py
+++ b/tools/tox_pip.py
@@ -1,17 +1,16 @@
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
 import os
 import shutil
 import subprocess
 import sys
 from glob import glob
+from typing import List
 
 VIRTUAL_ENV = os.environ['VIRTUAL_ENV']
 TOX_PIP_DIR = os.path.join(VIRTUAL_ENV, 'pip')
 
 
 def pip(args):
+    # type: (List[str]) -> None
     # First things first, get a recent (stable) version of pip.
     if not os.path.exists(TOX_PIP_DIR):
         subprocess.check_call([sys.executable, '-m', 'pip',
@@ -20,8 +19,8 @@ def pip(args):
                                'pip'])
         shutil.rmtree(glob(os.path.join(TOX_PIP_DIR, 'pip-*.dist-info'))[0])
     # And use that version.
-    pypath = os.environ.get('PYTHONPATH')
-    pypath = pypath.split(os.pathsep) if pypath is not None else []
+    pypath_env = os.environ.get('PYTHONPATH')
+    pypath = pypath_env.split(os.pathsep) if pypath_env is not None else []
     pypath.insert(0, TOX_PIP_DIR)
     os.environ['PYTHONPATH'] = os.pathsep.join(pypath)
     subprocess.check_call([sys.executable, '-m', 'pip'] + args)
diff --git a/tools/travis/install.sh b/tools/travis/install.sh
deleted file mode 100755
index 3b12d69a2..000000000
--- a/tools/travis/install.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/bash
-set -e
-set -x
-
-pip install --upgrade setuptools
-pip install --upgrade tox tox-venv
-pip freeze --all
diff --git a/tools/travis/run.sh b/tools/travis/run.sh
deleted file mode 100755
index df8f03e7a..000000000
--- a/tools/travis/run.sh
+++ /dev/null
@@ -1,65 +0,0 @@
-#!/bin/bash
-set -e
-
-# Short circuit test runs if there are no code changes involved.
-if [[ $TOXENV != docs ]] && [[ $TOXENV != lint ]]; then
-    if [[ "$TRAVIS_PULL_REQUEST" == "false" ]]
-    then
-        echo "This is not a PR -- will do a complete build."
-    else
-        # Pull requests are slightly complicated because $TRAVIS_COMMIT_RANGE
-        # may include more changes than desired if the history is convoluted.
-        # Instead, explicitly fetch the base branch and compare against the
-        # merge-base commit.
-        git fetch -q origin +refs/heads/$TRAVIS_BRANCH
-        changes=$(git diff --name-only HEAD $(git merge-base HEAD FETCH_HEAD))
-        echo "Files changed:"
-        echo "$changes"
-        if ! echo "$changes" | grep -qvE '(\.rst$)|(^docs)|(^news)|(^\.github)'
-        then
-            echo "Code was not changed -- skipping build."
-            exit
-        fi
-    fi
-fi
-
-# Export the correct TOXENV when not provided.
-echo "Determining correct TOXENV..."
-if [[ -z "$TOXENV" ]]; then
-    if [[ ${TRAVIS_PYTHON_VERSION} == pypy* ]]; then
-        export TOXENV=pypy
-    else
-        # We use the syntax ${string:index:length} to make 2.7 -> py27
-        _major=${TRAVIS_PYTHON_VERSION:0:1}
-        _minor=${TRAVIS_PYTHON_VERSION:2:1}
-        export TOXENV="py${_major}${_minor}"
-    fi
-fi
-echo "TOXENV=${TOXENV}"
-
-if [[ -z "$NEW_RESOLVER" ]]; then
-    RESOLVER_SWITCH=''
-else
-    RESOLVER_SWITCH='--new-resolver'
-fi
-
-# Print the commands run for this test.
-set -x
-if [[ "$GROUP" == "1" ]]; then
-    # Unit tests
-    tox -- --use-venv -m unit -n auto
-    # Integration tests (not the ones for 'pip install')
-    tox -- -m integration -n auto --durations=5 -k "not test_install" \
-        --use-venv $RESOLVER_SWITCH
-elif [[ "$GROUP" == "2" ]]; then
-    # Separate Job for running integration tests for 'pip install'
-    tox -- -m integration -n auto --durations=5 -k "test_install" \
-        --use-venv $RESOLVER_SWITCH
-elif [[ "$GROUP" == "3" ]]; then
-    # Separate Job for tests that fail with the new resolver
-    tox -- -m fails_on_new_resolver -n auto --durations=5 \
-        --use-venv $RESOLVER_SWITCH --new-resolver-runtests
-else
-    # Non-Testing Jobs should run once
-    tox
-fi
diff --git a/tools/travis/setup.sh b/tools/travis/setup.sh
deleted file mode 100755
index c52ce5f16..000000000
--- a/tools/travis/setup.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/bash
-set -e
-
-echo "Setting Git Credentials..."
-git config --global user.email "distutils-sig@python.org"
-git config --global user.name "pip"
diff --git a/tools/automation/vendoring/patches/appdirs.patch b/tools/vendoring/patches/appdirs.patch
similarity index 100%
rename from tools/automation/vendoring/patches/appdirs.patch
rename to tools/vendoring/patches/appdirs.patch
diff --git a/tools/automation/vendoring/patches/certifi.patch b/tools/vendoring/patches/certifi.patch
similarity index 100%
rename from tools/automation/vendoring/patches/certifi.patch
rename to tools/vendoring/patches/certifi.patch
diff --git a/tools/automation/vendoring/patches/requests.patch b/tools/vendoring/patches/requests.patch
similarity index 100%
rename from tools/automation/vendoring/patches/requests.patch
rename to tools/vendoring/patches/requests.patch
diff --git a/tox.ini b/tox.ini
index 79586eba8..0b9511bf9 100644
--- a/tox.ini
+++ b/tox.ini
@@ -27,7 +27,7 @@ deps = -r{toxinidir}/tools/requirements/tests.txt
 commands_pre =
     python -c 'import shutil, sys; shutil.rmtree(sys.argv[1], ignore_errors=True)' {toxinidir}/tests/data/common_wheels
     {[helpers]pip} wheel -w {toxinidir}/tests/data/common_wheels -r {toxinidir}/tools/requirements/tests-common_wheels.txt
-commands = pytest --timeout 300 []
+commands = pytest []
 install_command = {[helpers]pip} install {opts} {packages}
 list_dependencies_command = {[helpers]pip} freeze --all
 
@@ -35,7 +35,7 @@ list_dependencies_command = {[helpers]pip} freeze --all
 basepython = python3
 commands =
     {[helpers]mkdirp} {toxinidir}/.coverage-output
-    pytest --timeout 300 --cov=pip --cov-config={toxinidir}/setup.cfg []
+    pytest --cov=pip --cov-config={toxinidir}/setup.cfg []
 
 setenv =
     # Used in coverage configuration in setup.cfg.