Merge branch 'master' into pr/4612

Pradyun Gedam 2017-11-04 12:45:15 +05:30
commit 032009380f
No known key found for this signature in database
GPG Key ID: DA17C4B29CB32E4B
470 changed files with 10306 additions and 3451 deletions

.gitignore

@ -1,15 +1,37 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# Distribution / packaging
build/
dist/
docs/_build/
pip.egg-info/
MANIFEST
.tox
.cache
*.egg
*.eggs
*.py[cod]
*~
*.egg-info/
MANIFEST
# Documentation
docs/build/
# mypy
.mypy_cache/
# Unit test / coverage reports
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
# Misc
*~
.*.sw?
# For IntelliJ IDEs (basically PyCharm)
.idea/
# Scratch Pad for experiments
.scratch/


@ -1,2 +1,2 @@
ignore-paths:
- pip/_vendor/
- src/pip/_vendor/


@ -26,7 +26,10 @@ Ludovic Gasc <gmludo@gmail.com> <git@gmludo.eu>
Markus Hametner <fin+github@xbhd.org>
Masklinn <bitbucket.org@masklinn.net>
Matthew Iversen <teh.ivo@gmail.com> <teh.ivo@gmail.com>
Pi Delport <pjdelport@gmail.com>
<pnasrat@gmail.com> <pnasrat@googlemail.com>
Pradyun Gedam <pradyunsg@gmail.com> <pradyunsg@users.noreply.github.com>
Pradyun Gedam <pradyunsg@gmail.com>
Preston Holmes <preston@ptone.com>
Przemek Wrzos <hetmankp@none>
<hodgestar@gmail.com> <hodgestar+hg@gmail.com>


@ -1,31 +1,35 @@
language: python
sudo: false
dist: trusty
matrix:
fast_finish: true
include:
- env: TOXENV=docs
- env: TOXENV=lint-py2
- env: TOXENV=lint-py3
- env: TOXENV=mypy
- env: TOXENV=packaging
# PyPy jobs start first -- they are the slowest
- env: TOXENV=pypy
python: pypy
- env: TOXENV=pypy3
python: pypy3
# Latest Stable CPython jobs
- env: TOXENV=py27
python: 2.7
- env: TOXENV=py36
python: 3.6
# All the other Py3 versions
- env: TOXENV=py33
python: 3.3
- env: TOXENV=py34
python: 3.4
- env: TOXENV=py35
python: 3.5
- env: TOXENV=py36
python: 3.6
# Nightly Python goes last
- env: TOXENV=py37
python: nightly
- env: TOXENV=pypy
python: pypy-5.4
- env: "TOXENV=py27 VENDOR=no WHEELS=yes"
python: 2.7
- env: "TOXENV=py36 VENDOR=no WHEELS=yes"
python: 3.6
allow_failures:
- python: nightly


@ -2,49 +2,11 @@
set -e
set -x
# We want to create the virtual environment here, but not actually run anything
tox --notest
# If we have a VENDOR=no then we want to reinstall pip into the virtual
# environment without the vendor directory included as well as install the
# dependencies we need installed.
if [[ $VENDOR = "no" ]]; then
# Install our dependencies if we're not installing from wheels
if [[ $WHEELS != "yes" ]]; then
.tox/$TOXENV/bin/pip install -r pip/_vendor/vendor.txt --no-deps
fi
# Install our dependencies if we're installing from wheels
if [[ $WHEELS = "yes" ]]; then
mkdir -p /tmp/wheels
pip wheel --wheel-dir /tmp/wheels --no-deps -r pip/_vendor/vendor.txt
cp /tmp/wheels/* `echo .tox/$TOXENV/lib/python*/site-packages/pip/_vendor/`
fi
# Remove the vendored dependencies from within the installed pip inside of
# our installed copy of pip.
find .tox/$TOXENV/lib/python*/site-packages/pip/_vendor -d \
-not -regex '.*/pip/_vendor/__init__\.py$' \
-not -regex '.*/pip/_vendor$' \
-exec rm -rf {} \;
# Patch our installed pip/_vendor/__init__.py so that it knows to look for
# the vendored dependencies instead of only looking for the vendored.
sed -i 's/DEBUNDLED = False/DEBUNDLED = True/' \
.tox/$TOXENV/lib/python*/site-packages/pip/_vendor/__init__.py
# Test to make sure that we successfully installed without vendoring
if [ -f .tox/$TOXENV/lib/python*/site-packages/pip/_vendor/six.py ]; then
echo "Did not successfully unvendor"
exit 1
fi
fi
if [[ $TOXENV == py* ]]; then
# Run unit tests
tox -- -m unit
# Run integration tests
tox -- -m integration -n 8 --duration=5
tox -- -m integration -n 4 --duration=5
else
# Run once
tox


@ -235,7 +235,7 @@ Phil Freo <phil@philfreo.com>
Phil Whelan <phil123@gmail.com>
Philippe Ombredanne <pombredanne@gmail.com>
Pierre-Yves Rofes <github@rofes.fr>
Piet Delport <pjdelport@gmail.com>
Pi Delport <pjdelport@gmail.com>
Pradyun <pradyunsg@users.noreply.github.com>
Pradyun S. Gedam <pradyunsg@gmail.com>
Preston Holmes <preston@ptone.com>


@ -3,24 +3,27 @@ include LICENSE.txt
include NEWS.rst
include README.rst
include pyproject.toml
include pip/_vendor/README.rst
include pip/_vendor/vendor.txt
include src/pip/_vendor/README.rst
include src/pip/_vendor/vendor.txt
exclude .coveragerc
exclude .mailmap
exclude .travis.yml
exclude .landscape.yml
exclude pip/_vendor/Makefile
exclude src/pip/_vendor/Makefile
exclude tox.ini
exclude dev-requirements.txt
exclude *-requirements.txt
exclude appveyor.yml
recursive-include pip/_vendor *.pem
recursive-include src/pip/_vendor *.pem
recursive-include docs Makefile *.rst *.py *.bat
exclude src/pip/_vendor/six
recursive-exclude src/pip/_vendor *.pyi
prune .github
prune .travis
prune docs/_build
prune docs/build
prune news
prune contrib
prune tasks


@ -769,7 +769,7 @@
than erroring out. (#963)
- ``pip bundle`` and support for installing from pybundle files is now
considered deprecated and will be removed in pip v1.5.
- Fix a number of isses related to cleaning up and not reusing build
- Fix a number of issues related to cleaning up and not reusing build
directories. (#413, #709, #634, #602, #939, #865, #948)
- Added a User Agent so that pip is identifiable in logs. (#901)
- Added ssl and --user support to get-pip.py. Thanks Gabriel de Perthuis.


@ -1,16 +1,36 @@
environment:
matrix:
# Unit and integration tests.
- PYTHON: "C:\\Python27"
- PYTHON: "C:\\Python33"
- PYTHON: "C:\\Python34"
- PYTHON: "C:\\Python35"
RUN_INTEGRATION_TESTS: "True"
- PYTHON: "C:\\Python36-x64"
RUN_INTEGRATION_TESTS: "True"
# Unit tests only.
- PYTHON: "C:\\Python27-x64"
- PYTHON: "C:\\Python33"
- PYTHON: "C:\\Python33-x64"
- PYTHON: "C:\\Python34"
- PYTHON: "C:\\Python34-x64"
- PYTHON: "C:\\Python35"
- PYTHON: "C:\\Python35-x64"
- PYTHON: "C:\\Python36"
install:
cmd: "%PYTHON%\\python.exe -m pip install tox"
- "SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%"
- "python --version"
- "pip install certifi tox"
# Fix git SSL errors.
- "python -m certifi >cacert.txt"
- "set /p GIT_SSL_CAINFO=<cacert.txt"
- "set GIT_SSL_CAINFO"
build: off
test_script:
- "%PYTHON%\\Scripts\\tox.exe -e py -- -m unit -n 8"
# Shorten paths, workaround https://bugs.python.org/issue18199
- "subst T: %TEMP%"
- "set TEMP=T:\\"
- "set TMP=T:\\"
- "tox -e py -- -m unit -n 3"
- "if \"%RUN_INTEGRATION_TESTS%\" == \"True\" (
tox -e py -- -m integration -n 3 --duration=5 )"


@ -5,6 +5,7 @@ pytest-catchlog
pytest-rerunfailures
pytest-timeout
pytest-xdist
mock<1.1
pyyaml
mock
scripttest>=1.3
https://github.com/pypa/virtualenv/archive/master.zip#egg=virtualenv

docs-requirements.txt Normal file

@ -0,0 +1,7 @@
sphinx == 1.6.1
git+https://github.com/python/python-docs-theme.git#egg=python-docs-theme
git+https://github.com/pypa/pypa-docs-theme.git#egg=pypa-docs-theme
# XXX: This is a workaround for conf.py not seeing the development pip version
# when the documentation is built on ReadTheDocs.
.


@ -5,7 +5,7 @@
SPHINXOPTS =
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = _build
BUILDDIR = build
# User-friendly check for sphinx-build
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)


@ -33,7 +33,7 @@ extensions = ['sphinx.ext.extlinks', 'docs.pipext', 'sphinx.ext.intersphinx']
# intersphinx
intersphinx_cache_limit = 0
intersphinx_mapping = {
'pypug': ('https://packaging.python.org/en/latest/', None),
'pypug': ('https://packaging.python.org/', None),
'pypa': ('https://www.pypa.io/en/latest/', None),
}
@ -52,7 +52,7 @@ master_doc = 'index'
# General information about the project.
project = 'pip'
copyright = '2008-2016, PyPA'
copyright = '2008-2017, PyPA'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
@ -68,6 +68,11 @@ try:
except ImportError:
version = release = 'dev'
# We have this here because readthedocs plays tricks sometimes and there seems
# to be a hiesenbug, related to the version of pip discovered. This is here to
# help debug that if someone decides to do that in the future.
print(version)
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
@ -83,7 +88,7 @@ today_fmt = '%B %d, %Y'
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_patterns = ['_build/', 'man/']
exclude_patterns = ['build/', 'man/']
# The reST default role (used for this markup: `text`) to use for all documents
# default_role = None
@ -114,19 +119,17 @@ extlinks = {
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'default'
if not on_rtd:
try:
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
except ImportError:
pass
html_theme = "pypa_theme"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
html_theme_options = {
'collapsiblesidebar': True,
'externalrefs': True,
'navigation_depth': 2,
'issues_url': 'https://github.com/pypa/pip/issues'
}
# Add any paths that contain custom themes here, relative to this directory.
@ -157,10 +160,13 @@ html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = False
smart_quotes = False
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
html_sidebars = {
'**': ['localtoc.html', 'relations.html'],
'index': ['localtoc.html']
}
# Additional templates that should be rendered to pages, maps page names to
# template names.


@ -12,7 +12,7 @@ The `PyPA recommended <https://packaging.python.org/en/latest/current/>`_ tool
for installing Python packages.
.. toctree::
:maxdepth: 2
:maxdepth: 1
quickstart
installing


@ -6,13 +6,11 @@ Installation
Do I need to install pip?
-------------------------
pip is already installed if you're using Python 2 >=2.7.9 or Python 3 >=3.4
binaries downloaded from `python.org <https://www.python.org>`_, but you'll
need to :ref:`upgrade pip <Upgrading pip>`.
Additionally, pip will already be installed if you're working in a :ref:`Virtual
Environment <pypug:Creating and using Virtual Environments>` created by
:ref:`pypug:virtualenv` or :ref:`pyvenv <pypug:venv>`.
pip is already installed if you are using Python 2 >=2.7.9 or Python 3 >=3.4
downloaded from `python.org <https://www.python.org>`_ or if you are working
in a :ref:`Virtual Environment <pypug:Creating and using Virtual Environments>`
created by :ref:`pypug:virtualenv` or :ref:`pyvenv <pypug:venv>`.
Just make sure to :ref:`upgrade pip <Upgrading pip>`.
.. _`get-pip`:
@ -21,25 +19,25 @@ Installing with get-pip.py
--------------------------
To install pip, securely download `get-pip.py
<https://bootstrap.pypa.io/get-pip.py>`_. [1]_
<https://bootstrap.pypa.io/get-pip.py>`_. [1]_::
Then run the following:
curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py
::
Inspect ``get-pip.py`` for any malevolence. Then run the following::
python get-pip.py
.. warning::
Be cautious if you're using a Python install that's managed by your operating
system or another package manager. get-pip.py does not coordinate with
Be cautious if you are using a Python install that is managed by your operating
system or another package manager. ``get-pip.py`` does not coordinate with
those tools, and may leave your system in an inconsistent state.
get-pip.py will also install :ref:`pypug:setuptools` [2]_ and :ref:`pypug:wheel`,
if they're not already. :ref:`pypug:setuptools` is required to install
``get-pip.py`` also installs :ref:`pypug:setuptools` [2]_ and :ref:`pypug:wheel`
if they are not already. :ref:`pypug:setuptools` is required to install
:term:`source distributions <pypug:Source Distribution (or "sdist")>`. Both are
required to be able to build a :ref:`Wheel cache` (which improves installation
required in order to build a :ref:`Wheel cache` (which improves installation
speed), although neither are required to install pre-built :term:`wheels
<pypug:Wheel>`.
@ -55,14 +53,14 @@ get-pip.py options
.. option:: --no-setuptools
If set, don't attempt to install :ref:`pypug:setuptools`
If set, do not attempt to install :ref:`pypug:setuptools`
.. option:: --no-wheel
If set, don't attempt to install :ref:`pypug:wheel`
If set, do not attempt to install :ref:`pypug:wheel`
Additionally, ``get-pip.py`` supports using the :ref:`pip install options <pip
``get-pip.py`` allows :ref:`pip install options <pip
install Options>` and the :ref:`general options <General Options>`. Below are
some examples:
@ -91,16 +89,12 @@ the `Python Packaging User Guide
Upgrading pip
-------------
On Linux or macOS:
::
On Linux or macOS::
pip install -U pip
On Windows [4]_:
::
On Windows [4]_::
python -m pip install -U pip


@ -5,7 +5,7 @@ REM Command file for Sphinx documentation
if "%SPHINXBUILD%" == "" (
set SPHINXBUILD=sphinx-build
)
set BUILDDIR=_build
set BUILDDIR=build
set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
set I18NSPHINXOPTS=%SPHINXOPTS% .
if NOT "%PAPER%" == "" (


@ -6,9 +6,9 @@ from docutils import nodes
from docutils.parsers import rst
from docutils.statemachine import ViewList
from textwrap import dedent
from pip.commands import commands_dict as commands
from pip import cmdoptions
from pip.utils import get_prog
from pip._internal import cmdoptions
from pip._internal.commands import commands_dict as commands
from pip._internal.utils.misc import get_prog
class PipCommandUsage(rst.Directive):
@ -17,7 +17,7 @@ class PipCommandUsage(rst.Directive):
def run(self):
cmd = commands[self.arguments[0]]
prog = '%s %s' % (get_prog(), cmd.name)
usage = dedent(cmd.usage.replace('%prog', prog))
usage = dedent(cmd.usage.replace('%prog', prog)).strip()
node = nodes.literal_block(usage, usage)
return [node]


@ -3,7 +3,7 @@ Reference Guide
===============
.. toctree::
:maxdepth: 2
:maxdepth: 1
pip
pip_install


@ -1,8 +1,6 @@
pip
---
.. contents::
Usage
*****


@ -3,8 +3,6 @@
pip check
---------
.. contents::
Usage
*****


@ -4,8 +4,6 @@
pip config
------------
.. contents::
Usage
*****


@ -4,8 +4,6 @@
pip download
------------
.. contents::
Usage
*****


@ -4,8 +4,6 @@
pip freeze
-----------
.. contents::
Usage
*****


@ -3,8 +3,6 @@
pip hash
------------
.. contents::
Usage
*****


@ -3,8 +3,6 @@
pip install
-----------
.. contents::
Usage
*****
@ -374,6 +372,10 @@ Passing branch names, a commit hash or a tag name is possible like so::
[-e] git://git.example.com/MyProject.git@v1.0#egg=MyProject
[-e] git://git.example.com/MyProject.git@da39a3ee5e6b4b0d3255bfef95601890afd80709#egg=MyProject
When passing a commit hash, specifying a full hash is preferable to a partial
hash because a full hash allows pip to operate more efficiently (e.g. by
making fewer network calls).
Mercurial
~~~~~~~~~
@ -382,9 +384,10 @@ The supported schemes are: ``hg+http``, ``hg+https``,
Here are the supported forms::
[-e] hg+http://hg.example.com/MyProject#egg=MyProject
[-e] hg+https://hg.example.com/MyProject#egg=MyProject
[-e] hg+ssh://hg.example.com/MyProject#egg=MyProject
[-e] hg+http://hg.myproject.org/MyProject#egg=MyProject
[-e] hg+https://hg.myproject.org/MyProject#egg=MyProject
[-e] hg+ssh://hg.myproject.org/MyProject#egg=MyProject
[-e] hg+file:///home/user/projects/MyProject#egg=MyProject
You can also specify a revision number, a revision hash, a tag name or a local
branch name like so::


@ -3,8 +3,6 @@
pip list
---------
.. contents::
Usage
*****


@ -3,8 +3,6 @@
pip search
----------
.. contents::
Usage
*****


@ -3,9 +3,6 @@
pip show
--------
.. contents::
Usage
*****


@ -3,8 +3,6 @@
pip uninstall
-------------
.. contents::
Usage
*****


@ -4,8 +4,6 @@
pip wheel
---------
.. contents::
Usage
*****


@ -2,7 +2,28 @@
User Guide
==========
.. contents::
Running pip
***********
pip is a command line program. When you install pip, a ``pip`` command is added
to your system, which can be run from the command prompt as follows::
$ pip <pip arguments>
If you cannot run the ``pip`` command directly (possibly because the location
where it was installed isn't on your operating system's ``PATH``) then you can
run pip via the Python interpreter::
$ python -m pip <pip arguments>
On Windows, the ``py`` launcher can be used::
$ py -m pip <pip arguments>
Even though pip is available from your Python installation as an importable
module, via ``import pip``, it is *not supported* to use pip in this way. For
more details, see :ref:`Using pip from your program`.
Installing Packages
*******************
@ -398,8 +419,8 @@ This is the same as passing the option to pip directly::
pip --default-timeout=60 [...]
To set options that can be set multiple times on the command line, just add
spaces in between values. For example::
For command line options which can be repeated, use a space to separate
multiple values. For example::
export PIP_FIND_LINKS="http://mirror1.example.com http://mirror2.example.com"
@ -671,3 +692,71 @@ archives are built with identical packages.
downloaded by setuptools directly, skipping pip's protections. If you need
to use such a package, see :ref:`Controlling
setup_requires<controlling-setup-requires>`.
.. _`Using pip from your program`:
Using pip from your program
***************************
As noted previously, pip is a command line program. While it is implemented in Python,
and so is available from your Python code via ``import pip``, you must not use pip's
internal APIs in this way. There are a number of reasons for this:
#. The pip code assumes that it is in sole control of the global state of the program.
Pip manages things like the logging system configuration, or the values of the
standard IO streams, without considering the possibility that user code might be
affected.
#. Pip's code is *not* thread safe. If you were to run pip in a thread, there is no
guarantee that either your code or pip's would work as you expect.
#. Pip assumes that once it has finished its work, the process will terminate. It
doesn't need to handle the possibility that other code will continue to run
after that point, so (for example) calling pip twice in the same process is
likely to have issues.
This does not mean that the pip developers are opposed in principle to the idea that
pip could be used as a library - it's just that this isn't how it was written, and it
would be a lot of work to redesign the internals for use as a library, handling all
of the above issues, and designing a usable, robust and stable API that we could
guarantee would remain available across multiple releases of pip. And we simply don't
currently have the resources to even consider such a task.
What this means in practice is that everything inside of pip is considered an
implementation detail. Even the fact that the import name is ``pip`` is subject to
change without notice. While we do try not to break things as much as possible, all
the internal APIs can change at any time, for any reason. It also means that we
generally *won't* fix issues that are a result of using pip in an unsupported way.
It should also be noted that modifying the contents of ``sys.path`` in a running Python
process is something that should only be done with care. The import system caches
certain data, and installing new packages while a program is running may not always
behave as expected. In practice, there is rarely an issue, but it is something to be
aware of.
Having said all of the above, it is worth covering the options available if you
decide that you do want to run pip from within your program. The most reliable
approach, and the one that is fully supported, is to run pip in a subprocess. This
is easily done using the standard ``subprocess`` module::
subprocess.check_call([sys.executable, '-m', 'pip', 'install', 'my_package'])
If you want to process the output further, use one of the other APIs in the module::
reqs = subprocess.check_output([sys.executable, '-m', 'pip', 'freeze'])
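Pulling these pieces together, here is a minimal, self-contained sketch of the
supported subprocess approach; the helper names and the example package used
below are hypothetical, not part of pip's API::

    import subprocess
    import sys

    def pip_install(package):
        # Run pip under the same interpreter that is running this code.
        subprocess.check_call([sys.executable, '-m', 'pip', 'install', package])

    def pip_freeze():
        # Capture the output of "pip freeze" for further processing.
        output = subprocess.check_output([sys.executable, '-m', 'pip', 'freeze'])
        return output.decode('utf-8').splitlines()

    if __name__ == '__main__':
        pip_install('requests')
        print(pip_freeze())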
If you don't want to use pip's command line functionality, but are rather
trying to implement code that works with Python packages, their metadata, or
PyPI, then you should consider other, supported, packages that offer this type
of ability. Some examples that you could consider include:
* ``packaging`` - Utilities to work with standard package metadata (versions,
requirements, etc.)
* ``setuptools`` (specifically ``pkg_resources``) - Functions for querying what
packages the user has installed on their system.
* ``wheel`` - Code for manipulating (creating, querying and installing) wheels.
* ``distlib`` - Packaging and distribution utilities (including functions for
interacting with PyPI).

news/1130.bugfix Normal file

@ -0,0 +1,2 @@
Allow pip to work if the ``GIT_DIR`` and ``GIT_WORK_TREE`` environment
variables are set.

news/1139.bugfix Normal file

@ -0,0 +1 @@
Make ``pip install --force-reinstall`` not require passing ``--upgrade``.

news/3830.bugfix Normal file

@ -0,0 +1 @@
pip no longer passes global options from one package to later packages in the same requirement file.

news/3997.bugfix Normal file

@ -0,0 +1,2 @@
Shell completion scripts now use correct executable names (e.g., ``pip3``
instead of ``pip``)

news/4227.feature Normal file

@ -0,0 +1 @@
Report the line which caused the hash error when using requirement files.

news/4293.bugfix Normal file

@ -0,0 +1,5 @@
Fix for an incorrect ``freeze`` warning message due to a package being
included in multiple requirements files that were passed to ``freeze``.
Instead of warning incorrectly that the package is not installed, pip
now warns that the package was declared multiple times and lists the
name of each requirements file that contains the package in question.

news/4299.feature Normal file

@ -0,0 +1 @@
Support build-numbers in wheel versions and support sorting with build-numbers.

news/4448.bugfix Normal file

@ -0,0 +1,2 @@
Reinstalling an editable package from Git no longer assumes that the ``master``
branch exists.

news/4473.feature Normal file

@ -0,0 +1,4 @@
pip now retries on more HTTP status codes, for intermittent failures.
Previously, it only retried on the standard 503. Now, it also retries on 500
(transient failures on AWS S3), 520 and 527 (transient failures on Cloudflare).

news/4507.feature Normal file

@ -0,0 +1,2 @@
Don't log a warning when installing a dependency from Git if the name looks
like a commit hash.

news/4553.feature Normal file

@ -0,0 +1 @@
pip now displays a warning when it installs scripts from a wheel outside the PATH. These warnings can be suppressed using a new --no-warn-script-location option.

news/4565.trivial Normal file

@ -0,0 +1,3 @@
Improve sentence in docs describing how to put multiple values into a
single environment variable in those cases where a command line option
may be repeated.

news/4655.bugfix Normal file

@ -0,0 +1 @@
Fix warning message on mismatched versions during installation.

news/4667.bugfix Normal file

@ -0,0 +1,2 @@
pip now records installed files in a deterministic manner improving
reproducibility.

news/4675.bugfix Normal file

@ -0,0 +1,3 @@
Fix an issue where ``pip install -e`` on a Git url would fail to update if
a branch or tag name is specified that happens to match the prefix of the
current ``HEAD`` commit hash.

news/4696.removal Normal file

@ -0,0 +1,2 @@
Move all of pip's APIs into the pip._internal package, properly reflecting the
fact that pip does not currently have any public APIs.

news/4700.removal Normal file

@ -0,0 +1,2 @@
Move all of pip's APIs into the pip._internal package, properly reflecting the
fact that pip does not currently have any public APIs.

news/4743.doc Normal file

@ -0,0 +1,2 @@
Document how to call pip from your code, including the fact
that we do not provide a Python API.

news/4749.feature Normal file

@ -0,0 +1,2 @@
The command-line autocompletion engine ``pip show`` now autocompletes installed
distribution names.

news/4758.feature Normal file

@ -0,0 +1 @@
Change documentation theme to be in line with Python Documentation


@ -1 +1 @@
Upgraded CacheControl to 0.12.2.
Upgraded CacheControl to 0.12.3.

news/certifi.vendor Normal file

@ -0,0 +1 @@
Vendored certifi at 2017.7.27.1.

news/chardet.vendor Normal file

@ -0,0 +1 @@
Vendored chardet at 3.0.4.


@ -1 +1 @@
Upgraded distlib to 0.2.5.
Upgraded distlib to 0.2.6.

news/idna.vendor Normal file

@ -0,0 +1 @@
Vendored idna at idna==2.6.

news/pytoml.vendor Normal file

@ -0,0 +1 @@
Upgraded pytoml to 0.1.14.


@ -1 +1 @@
Upgraded requests to 2.14.2.
Upgraded requests to 2.18.4.


@ -1 +1 @@
Upgraded pkg_resources (via setuptools) to 35.0.2.
Upgraded pkg_resources (via setuptools) to 36.6.0.

news/six.vendor Normal file

@ -0,0 +1 @@
Upgraded six to 1.11.0.

news/urllib3.vendor Normal file

@ -0,0 +1 @@
Vendored urllib3 at 1.22.


@ -1,20 +0,0 @@
try:
from urllib.parse import urljoin
except ImportError:
from urlparse import urljoin
try:
import cPickle as pickle
except ImportError:
import pickle
from pip._vendor.requests.packages.urllib3.response import HTTPResponse
from pip._vendor.requests.packages.urllib3.util import is_fp_closed
# Replicate some six behaviour
try:
text_type = unicode
except NameError:
text_type = str



@ -1,36 +0,0 @@
'''
Debian and other distributions "unbundle" requests' vendored dependencies, and
rewrite all imports to use the global versions of ``urllib3`` and ``chardet``.
The problem with this is that not only requests itself imports those
dependencies, but third-party code outside of the distros' control too.
In reaction to these problems, the distro maintainers replaced
``requests.packages`` with a magical "stub module" that imports the correct
modules. The implementations were varying in quality and all had severe
problems. For example, a symlink (or hardlink) that links the correct modules
into place introduces problems regarding object identity, since you now have
two modules in `sys.modules` with the same API, but different identities::
requests.packages.urllib3 is not urllib3
With version ``2.5.2``, requests started to maintain its own stub, so that
distro-specific breakage would be reduced to a minimum, even though the whole
issue is not requests' fault in the first place. See
https://github.com/kennethreitz/requests/pull/2375 for the corresponding pull
request.
'''
from __future__ import absolute_import
import sys
try:
from . import urllib3
except ImportError:
import urllib3
sys.modules['%s.urllib3' % __name__] = urllib3
try:
from . import chardet
except ImportError:
import chardet
sys.modules['%s.chardet' % __name__] = chardet


@ -1,80 +0,0 @@
#!/usr/bin/env python
"""
Script which takes one or more file paths and reports on their detected
encodings
Example::
% chardetect somefile someotherfile
somefile: windows-1252 with confidence 0.5
someotherfile: ascii with confidence 1.0
If no paths are provided, it takes its input from stdin.
"""
from __future__ import absolute_import, print_function, unicode_literals
import argparse
import sys
from io import open
from chardet import __version__
from chardet.universaldetector import UniversalDetector
def description_of(lines, name='stdin'):
"""
Return a string describing the probable encoding of a file or
list of strings.
:param lines: The lines to get the encoding of.
:type lines: Iterable of bytes
:param name: Name of file or collection of lines
:type name: str
"""
u = UniversalDetector()
for line in lines:
u.feed(line)
u.close()
result = u.result
if result['encoding']:
return '{0}: {1} with confidence {2}'.format(name, result['encoding'],
result['confidence'])
else:
return '{0}: no result'.format(name)
def main(argv=None):
'''
Handles command line arguments and gets things started.
:param argv: List of arguments, as if specified on the command-line.
If None, ``sys.argv[1:]`` is used instead.
:type argv: list of str
'''
# Get command line arguments
parser = argparse.ArgumentParser(
description="Takes one or more file paths and reports their detected \
encodings",
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
conflict_handler='resolve')
parser.add_argument('input',
help='File whose encoding we would like to determine.',
type=argparse.FileType('rb'), nargs='*',
default=[sys.stdin])
parser.add_argument('--version', action='version',
version='%(prog)s {0}'.format(__version__))
args = parser.parse_args(argv)
for f in args.input:
if f.isatty():
print("You are running chardetect interactively. Press " +
"CTRL-D twice at the start of a blank line to signal the " +
"end of your input. If you want help, run chardetect " +
"--help\n", file=sys.stderr)
print(description_of(f, f.name))
if __name__ == '__main__':
main()


@ -1,39 +0,0 @@
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
_debug = 0
eDetecting = 0
eFoundIt = 1
eNotMe = 2
eStart = 0
eError = 1
eItsMe = 2
SHORTCUT_THRESHOLD = 0.95


@ -1 +0,0 @@
from .core import *


@ -1,71 +0,0 @@
"""Cache Management
"""
import errno
import logging
import os
from pip._vendor.packaging.utils import canonicalize_name
import pip.index
from pip.compat import expanduser
from pip.download import path_to_url
from pip.utils.cache import get_cache_path_for_link
from pip.wheel import InvalidWheelFilename, Wheel
logger = logging.getLogger(__name__)
class WheelCache(object):
"""A cache of wheels for future installs."""
def __init__(self, cache_dir, format_control):
"""Create a wheel cache.
:param cache_dir: The root of the cache.
:param format_control: A pip.index.FormatControl object to limit
binaries being read from the cache.
"""
self._cache_dir = expanduser(cache_dir) if cache_dir else None
self._format_control = format_control
def cached_wheel(self, link, package_name):
not_cached = (
not self._cache_dir or
not link or
link.is_wheel or
not link.is_artifact or
not package_name
)
if not_cached:
return link
canonical_name = canonicalize_name(package_name)
formats = pip.index.fmt_ctl_formats(
self._format_control, canonical_name
)
if "binary" not in formats:
return link
root = get_cache_path_for_link(self._cache_dir, link)
try:
wheel_names = os.listdir(root)
except OSError as err:
if err.errno in {errno.ENOENT, errno.ENOTDIR}:
return link
raise
candidates = []
for wheel_name in wheel_names:
try:
wheel = Wheel(wheel_name)
except InvalidWheelFilename:
continue
if not wheel.supported():
# Built for a different python/arch/etc
continue
candidates.append((wheel.support_index_min(), wheel_name))
if not candidates:
return link
candidates.sort()
path = os.path.join(root, candidates[0][1])
return pip.index.Link(path_to_url(path))


@ -1,4 +0,0 @@
from pip.models.index import Index, PyPI
__all__ = ["Index", "PyPI"]


@ -1,343 +0,0 @@
"""Prepares a distribution for installation
"""
import logging
import os
from pip._vendor import pkg_resources, requests
from pip.compat import expanduser
from pip.download import (
is_dir_url, is_file_url, is_vcs_url, unpack_url, url_to_path
)
from pip.exceptions import (
DirectoryUrlHashUnsupported, HashUnpinned, InstallationError,
PreviousBuildDirError, VcsHashUnsupported
)
from pip.utils import display_path, dist_in_usersite, normalize_path
from pip.utils.hashes import MissingHashes
from pip.utils.logging import indent_log
from pip.vcs import vcs
logger = logging.getLogger(__name__)
def make_abstract_dist(req):
"""Factory to make an abstract dist object.
Preconditions: Either an editable req with a source_dir, or satisfied_by or
a wheel link, or a non-editable req with a source_dir.
:return: A concrete DistAbstraction.
"""
if req.editable:
return IsSDist(req)
elif req.link and req.link.is_wheel:
return IsWheel(req)
else:
return IsSDist(req)
class DistAbstraction(object):
"""Abstracts out the wheel vs non-wheel Resolver.resolve() logic.
The requirements for anything installable are as follows:
- we must be able to determine the requirement name
(or we can't correctly handle the non-upgrade case).
- we must be able to generate a list of run-time dependencies
without installing any additional packages (or we would
have to either burn time by doing temporary isolated installs
or alternatively violate pips 'don't start installing unless
all requirements are available' rule - neither of which are
desirable).
- for packages with setup requirements, we must also be able
to determine their requirements without installing additional
packages (for the same reason as run-time dependencies)
- we must be able to create a Distribution object exposing the
above metadata.
"""
def __init__(self, req):
self.req = req
def dist(self, finder):
"""Return a setuptools Dist object."""
raise NotImplementedError(self.dist)
def prep_for_dist(self):
"""Ensure that we can get a Dist for this requirement."""
raise NotImplementedError(self.dist)
class IsWheel(DistAbstraction):
def dist(self, finder):
return list(pkg_resources.find_distributions(
self.req.source_dir))[0]
def prep_for_dist(self):
# FIXME:https://github.com/pypa/pip/issues/1112
pass
class IsSDist(DistAbstraction):
def dist(self, finder):
dist = self.req.get_dist()
# FIXME: shouldn't be globally added.
if dist.has_metadata('dependency_links.txt'):
finder.add_dependency_links(
dist.get_metadata_lines('dependency_links.txt')
)
return dist
def prep_for_dist(self):
self.req.run_egg_info()
self.req.assert_source_matches_version()
class Installed(DistAbstraction):
def dist(self, finder):
return self.req.satisfied_by
def prep_for_dist(self):
pass
class RequirementPreparer(object):
"""Prepares a Requirement
"""
def __init__(self, build_dir, download_dir, src_dir, wheel_download_dir,
progress_bar):
super(RequirementPreparer, self).__init__()
self.src_dir = src_dir
self.build_dir = build_dir
# Where still packed archives should be written to. If None, they are
# not saved, and are deleted immediately after unpacking.
self.download_dir = download_dir
# Where still-packed .whl files should be written to. If None, they are
# written to the download_dir parameter. Separate to download_dir to
# permit only keeping wheel archives for pip wheel.
if wheel_download_dir:
wheel_download_dir = normalize_path(wheel_download_dir)
self.wheel_download_dir = wheel_download_dir
# NOTE
# download_dir and wheel_download_dir overlap semantically and may
# be combined if we're willing to have non-wheel archives present in
# the wheelhouse output by 'pip wheel'.
self.progress_bar = progress_bar
@property
def _download_should_save(self):
# TODO: Modify to reduce indentation needed
if self.download_dir:
self.download_dir = expanduser(self.download_dir)
if os.path.exists(self.download_dir):
return True
else:
logger.critical('Could not find download directory')
raise InstallationError(
"Could not find or access download directory '%s'"
% display_path(self.download_dir))
return False
def prepare_requirement(self, req, resolver):
# TODO: Breakup into smaller functions
# TODO: Add a nice docstring
if req.editable:
logger.info('Obtaining %s', req)
else:
# satisfied_by is only evaluated by calling _check_skip_installed,
# so it must be None here.
assert req.satisfied_by is None
if not resolver.ignore_installed:
skip_reason = resolver._check_skip_installed(req)
if req.satisfied_by:
assert skip_reason is not None, (
'_check_skip_installed returned None but '
'req.satisfied_by is set to %r'
% (req.satisfied_by,))
logger.info(
'Requirement %s: %s (%s)', skip_reason,
req,
req.satisfied_by.version)
else:
if (req.link and
req.link.scheme == 'file'):
path = url_to_path(req.link.url)
logger.info('Processing %s', display_path(path))
else:
logger.info('Collecting %s', req)
assert resolver.require_hashes is not None, \
"This should have been set in resolve()"
with indent_log():
# ################################ #
# # vcs update or unpack archive # #
# ################################ #
if req.editable:
if resolver.require_hashes:
raise InstallationError(
'The editable requirement %s cannot be installed when '
'requiring hashes, because there is no single file to '
'hash.' % req)
req.ensure_has_source_dir(self.src_dir)
req.update_editable(not self._download_should_save)
abstract_dist = make_abstract_dist(req)
abstract_dist.prep_for_dist()
if self._download_should_save:
req.archive(self.download_dir)
req.check_if_exists()
elif req.satisfied_by:
if resolver.require_hashes:
logger.debug(
'Since it is already installed, we are trusting this '
'package without checking its hash. To ensure a '
'completely repeatable environment, install into an '
'empty virtualenv.')
abstract_dist = Installed(req)
else:
# @@ if filesystem packages are not marked
# editable in a req, a non deterministic error
# occurs when the script attempts to unpack the
# build directory
req.ensure_has_source_dir(self.build_dir)
# If a checkout exists, it's unwise to keep going. version
# inconsistencies are logged later, but do not fail the
# installation.
# FIXME: this won't upgrade when there's an existing
# package unpacked in `req.source_dir`
# package unpacked in `req.source_dir`
if os.path.exists(
os.path.join(req.source_dir, 'setup.py')):
raise PreviousBuildDirError(
"pip can't proceed with requirements '%s' due to a"
" pre-existing build directory (%s). This is "
"likely due to a previous installation that failed"
". pip is being responsible and not assuming it "
"can delete this. Please delete it and try again."
% (req, req.source_dir)
)
req.populate_link(
resolver.finder,
resolver._is_upgrade_allowed(req),
resolver.require_hashes
)
# We can't hit this spot and have populate_link return None.
# req.satisfied_by is None here (because we're
# guarded) and upgrade has no impact except when satisfied_by
# is not None.
# Then inside find_requirement existing_applicable -> False
# If no new versions are found, DistributionNotFound is raised,
# otherwise a result is guaranteed.
assert req.link
link = req.link
# Now that we have the real link, we can tell what kind of
# requirements we have and raise some more informative errors
# than otherwise. (For example, we can raise VcsHashUnsupported
# for a VCS URL rather than HashMissing.)
if resolver.require_hashes:
# We could check these first 2 conditions inside
# unpack_url and save repetition of conditions, but then
# we would report less-useful error messages for
# unhashable requirements, complaining that there's no
# hash provided.
if is_vcs_url(link):
raise VcsHashUnsupported()
elif is_file_url(link) and is_dir_url(link):
raise DirectoryUrlHashUnsupported()
if (not req.original_link and
not req.is_pinned):
# Unpinned packages are asking for trouble when a new
# version is uploaded. This isn't a security check, but
# it saves users a surprising hash mismatch in the
# future.
#
# file:/// URLs aren't pinnable, so don't complain
# about them not being pinned.
raise HashUnpinned()
hashes = req.hashes(
trust_internet=not resolver.require_hashes)
if resolver.require_hashes and not hashes:
# Known-good hashes are missing for this requirement, so
# shim it with a facade object that will provoke hash
# computation and then raise a HashMissing exception
# showing the user what the hash should be.
hashes = MissingHashes()
try:
download_dir = self.download_dir
# We always delete unpacked sdists after pip ran.
autodelete_unpacked = True
if req.link.is_wheel \
and self.wheel_download_dir:
# when doing 'pip wheel` we download wheels to a
# dedicated dir.
download_dir = self.wheel_download_dir
if req.link.is_wheel:
if download_dir:
# When downloading, we only unpack wheels to get
# metadata.
autodelete_unpacked = True
else:
# When installing a wheel, we use the unpacked
# wheel.
autodelete_unpacked = False
unpack_url(
req.link, req.source_dir,
download_dir, autodelete_unpacked,
session=resolver.session, hashes=hashes,
progress_bar=self.progress_bar)
except requests.HTTPError as exc:
logger.critical(
'Could not install requirement %s because '
'of error %s',
req,
exc,
)
raise InstallationError(
'Could not install requirement %s because '
'of HTTP error %s for URL %s' %
(req, exc, req.link)
)
abstract_dist = make_abstract_dist(req)
abstract_dist.prep_for_dist()
if self._download_should_save:
# Make a .zip of the source_dir we already created.
if req.link.scheme in vcs.all_schemes:
req.archive(self.download_dir)
# req.req is only avail after unpack for URL
# pkgs repeat check_if_exists to uninstall-on-upgrade
# (#14)
if not resolver.ignore_installed:
req.check_if_exists()
if req.satisfied_by:
should_modify = (
resolver.upgrade_strategy != "to-satisfy-only" or
resolver.ignore_installed
)
if should_modify:
# don't uninstall conflict if user install and
# conflict is not user install
if not (resolver.use_user_site and not
dist_in_usersite(req.satisfied_by)):
req.conflicts_with = \
req.satisfied_by
req.satisfied_by = None
else:
logger.info(
'Requirement already satisfied (use '
'--upgrade to upgrade): %s',
req,
)
return abstract_dist


@ -1,46 +0,0 @@
"""Helpers for caches
"""
import hashlib
import os.path
def get_cache_path_for_link(cache_dir, link):
"""
Return a directory to store cached wheels in for link.
Because there are M wheels for any one sdist, we provide a directory
to cache them in, and then consult that directory when looking up
cache hits.
We only insert things into the cache if they have plausible version
numbers, so that we don't contaminate the cache with things that were not
unique. E.g. ./package might have dozens of installs done for it and build
a version of 0.0...and if we built and cached a wheel, we'd end up using
the same wheel even if the source has been edited.
:param cache_dir: The cache_dir being used by pip.
:param link: The link of the sdist for which this will cache wheels.
"""
# We want to generate an url to use as our cache key, we don't want to just
# re-use the URL because it might have other items in the fragment and we
# don't care about those.
key_parts = [link.url_without_fragment]
if link.hash_name is not None and link.hash is not None:
key_parts.append("=".join([link.hash_name, link.hash]))
key_url = "#".join(key_parts)
# Encode our key url with sha224, we'll use this because it has similar
# security properties to sha256, but with a shorter total output (and thus
# less secure). However the differences don't make a lot of difference for
# our use case here.
hashed = hashlib.sha224(key_url.encode()).hexdigest()
# We want to nest the directories some to prevent having a ton of top level
# directories where we might run out of sub directories on some FS.
parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]
# Inside of the base location for cached wheels, expand our parts and join
# them all together.
return os.path.join(cache_dir, "wheels", *parts)
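A minimal sketch, with hypothetical names, of the cache-layout scheme described
above: the cache key URL is hashed with sha224 and the digest is split into
nested directory parts under ``wheels``::

    import hashlib
    import os

    def sketch_cache_path(cache_dir, key_url):
        # sha224 of the cache key URL, as in get_cache_path_for_link above.
        hashed = hashlib.sha224(key_url.encode()).hexdigest()
        # Nest the digest so no single directory grows too large.
        parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]
        return os.path.join(cache_dir, "wheels", *parts)

    print(sketch_cache_path("/tmp/pip-cache", "https://example.com/pkg-1.0.tar.gz"))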


@ -11,11 +11,24 @@ known_first_party =
default_section = THIRDPARTY
[flake8]
# Ignoring unused imports since mypy would warn of that.
ignore = F401
exclude = .tox,.idea,*.egg,build,_vendor,data
select = E,W,F
[mypy]
follow_imports = silent
ignore_missing_imports = True
[mypy-pip/_vendor/*]
follow_imports = skip
ignore_errors = True
[tool:pytest]
addopts = --ignore pip/_vendor --ignore tests/tests_cache
addopts = --ignore src/pip/_vendor --ignore tests/tests_cache -r aR
[bdist_wheel]
universal=1
universal = 1
[metadata]
license_file = LICENSE.txt


@ -12,7 +12,8 @@ here = os.path.abspath(os.path.dirname(__file__))
def read(*parts):
# intentionally *not* adding an encoding option to open, See:
# https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690
return codecs.open(os.path.join(here, *parts), 'r').read()
with codecs.open(os.path.join(here, *parts), 'r') as fp:
return fp.read()
def find_version(*file_paths):
@ -38,7 +39,7 @@ tests_require = [
setup(
name="pip",
version=find_version("pip", "__init__.py"),
version=find_version("src", "pip", "__init__.py"),
description="The PyPA recommended tool for installing Python packages.",
long_description=long_description,
classifiers=[
@ -46,6 +47,7 @@ setup(
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Topic :: Software Development :: Build Tools",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
@ -53,6 +55,7 @@ setup(
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy"
],
keywords='easy_install distutils setuptools egg virtualenv',
@ -60,7 +63,11 @@ setup(
author_email='python-virtualenv@groups.google.com',
url='https://pip.pypa.io/',
license='MIT',
packages=find_packages(exclude=["contrib", "docs", "tests*", "tasks"]),
package_dir={"": "src"},
packages=find_packages(
where="src",
exclude=["contrib", "docs", "tests*", "tasks"],
),
package_data={
"pip._vendor.certifi": ["*.pem"],
"pip._vendor.requests": ["*.pem"],
@ -69,9 +76,9 @@ setup(
},
entry_points={
"console_scripts": [
"pip=pip:main",
"pip%s=pip:main" % sys.version[:1],
"pip%s=pip:main" % sys.version[:3],
"pip=pip._internal:main",
"pip%s=pip._internal:main" % sys.version[:1],
"pip%s=pip._internal:main" % sys.version[:3],
],
},
tests_require=tests_require,
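A brief illustrative sketch, assuming CPython 3.6, of the wrapper script names
those ``console_scripts`` entries generate (all pointing at
``pip._internal:main``)::

    import sys

    # On CPython 3.6, sys.version begins with "3.6.x", so the slices below
    # evaluate to "3" and "3.6"; the installed scripts are pip, pip3 and pip3.6.
    print("pip")
    print("pip%s" % sys.version[:1])
    print("pip%s" % sys.version[:3])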

src/pip/__init__.py Normal file

@ -0,0 +1 @@
__version__ = "10.0.0.dev0"


@ -13,7 +13,7 @@ if __package__ == '':
path = os.path.dirname(os.path.dirname(__file__))
sys.path.insert(0, path)
import pip # noqa
from pip._internal import main as _main # noqa
if __name__ == '__main__':
sys.exit(pip.main())
sys.exit(_main())


@ -17,7 +17,7 @@ import sys
# to add socks as yet another dependency for pip, nor do I want to allow-stder
# in the DEP-8 tests, so just suppress the warning. pdb tells me this has to
# be done before the import of pip.vcs.
from pip._vendor.requests.packages.urllib3.exceptions import DependencyWarning
from pip._vendor.urllib3.exceptions import DependencyWarning
warnings.filterwarnings("ignore", category=DependencyWarning) # noqa
# We want to inject the use of SecureTransport as early as possible so that any
@ -32,37 +32,24 @@ else:
if (sys.platform == "darwin" and
ssl.OPENSSL_VERSION_NUMBER < 0x1000100f): # OpenSSL 1.0.1
try:
from pip._vendor.requests.packages.urllib3.contrib import (
securetransport,
)
from pip._vendor.urllib3.contrib import securetransport
except (ImportError, OSError):
pass
else:
securetransport.inject_into_urllib3()
from pip.exceptions import CommandError, PipError
from pip.utils import get_installed_distributions, get_prog
from pip.utils import deprecation
from pip.vcs import git, mercurial, subversion, bazaar # noqa
from pip.baseparser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
from pip.commands import get_summaries, get_similar_commands
from pip.commands import commands_dict
from pip._vendor.requests.packages.urllib3.exceptions import (
InsecureRequestWarning,
from pip import __version__
from pip._internal import cmdoptions
from pip._internal.exceptions import CommandError, PipError
from pip._internal.utils.misc import get_installed_distributions, get_prog
from pip._internal.utils import deprecation
from pip._internal.vcs import git, mercurial, subversion, bazaar # noqa
from pip._internal.baseparser import (
ConfigOptionParser, UpdatingDefaultsHelpFormatter,
)
# assignment for flake8 to be happy
# This fixes a peculiarity when importing via __import__ - as we are
# initialising the pip module, "from pip import cmdoptions" is recursive
# and appears not to work properly in that situation.
import pip.cmdoptions
cmdoptions = pip.cmdoptions
# The version as used in the setup.py and the docs conf.py
__version__ = "10.0.0.dev0"
from pip._internal.commands import get_summaries, get_similar_commands
from pip._internal.commands import commands_dict
from pip._vendor.urllib3.exceptions import InsecureRequestWarning
logger = logging.getLogger(__name__)
@ -100,8 +87,12 @@ def autocomplete():
# special case: 'help' subcommand has no options
if subcommand_name == 'help':
sys.exit(1)
# special case: list locally installed dists for uninstall command
if subcommand_name == 'uninstall' and not current.startswith('-'):
# special case: list locally installed dists for show and uninstall
should_list_installed = (
subcommand_name in ['show', 'uninstall'] and
not current.startswith('-')
)
if should_list_installed:
installed = []
lc = current.lower()
for dist in get_installed_distributions(local_only=True):
@ -241,7 +232,7 @@ def main(args=None):
sys.exit(1)
# Needed for locale.getpreferredencoding(False) to work
# in pip.utils.encoding.auto_decode
# in pip._internal.utils.encoding.auto_decode
try:
locale.setlocale(locale.LC_ALL, '')
except locale.Error as e:
@ -249,7 +240,3 @@ def main(args=None):
logger.debug("Ignoring error %s when setting locale", e)
command = commands_dict[cmd_name](isolated=check_isolated(cmd_args))
return command.main(cmd_args)
if __name__ == '__main__':
sys.exit(main())


@ -8,36 +8,43 @@ import os
import sys
import warnings
from pip import cmdoptions
from pip.baseparser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
from pip.compat import WINDOWS
from pip.download import PipSession
from pip.exceptions import (
from pip._internal import cmdoptions
from pip._internal.baseparser import (
ConfigOptionParser, UpdatingDefaultsHelpFormatter
)
from pip._internal.compat import WINDOWS
from pip._internal.download import PipSession
from pip._internal.exceptions import (
BadCommand, CommandError, InstallationError, PreviousBuildDirError,
UninstallationError
)
from pip.index import PackageFinder
from pip.locations import running_under_virtualenv
from pip.req import InstallRequirement, parse_requirements
from pip.status_codes import (
from pip._internal.index import PackageFinder
from pip._internal.locations import running_under_virtualenv
from pip._internal.req.req_file import parse_requirements
from pip._internal.req.req_install import InstallRequirement
from pip._internal.status_codes import (
ERROR, PREVIOUS_BUILD_DIR_ERROR, SUCCESS, UNKNOWN_ERROR,
VIRTUALENV_NOT_FOUND
)
from pip.utils import deprecation, get_prog, normalize_path
from pip.utils.logging import IndentingFormatter
from pip.utils.outdated import pip_version_check
from pip._internal.utils import deprecation
from pip._internal.utils.logging import IndentingFormatter
from pip._internal.utils.misc import get_prog, normalize_path
from pip._internal.utils.outdated import pip_version_check
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import Optional
__all__ = ['Command']
logger = logging.getLogger(__name__)
class Command(object):
name = None
usage = None
hidden = False
ignore_require_venv = False
name = None # type: Optional[str]
usage = None # type: Optional[str]
hidden = False # type: bool
ignore_require_venv = False # type: bool
log_streams = ("ext://sys.stdout", "ext://sys.stderr")
def __init__(self, isolated=False):
@ -130,7 +137,7 @@ class Command(object):
"disable_existing_loggers": False,
"filters": {
"exclude_warnings": {
"()": "pip.utils.logging.MaxLevelFilter",
"()": "pip._internal.utils.logging.MaxLevelFilter",
"level": logging.WARNING,
},
},
@ -143,20 +150,24 @@ class Command(object):
"handlers": {
"console": {
"level": level,
"class": "pip.utils.logging.ColorizedStreamHandler",
"class":
"pip._internal.utils.logging.ColorizedStreamHandler",
"stream": self.log_streams[0],
"filters": ["exclude_warnings"],
"formatter": "indent",
},
"console_errors": {
"level": "WARNING",
"class": "pip.utils.logging.ColorizedStreamHandler",
"class":
"pip._internal.utils.logging.ColorizedStreamHandler",
"stream": self.log_streams[1],
"formatter": "indent",
},
"user_log": {
"level": "DEBUG",
"class": "pip.utils.logging.BetterRotatingFileHandler",
"class":
("pip._internal.utils.logging"
".BetterRotatingFileHandler"),
"filename": options.log or "/dev/null",
"delay": True,
"formatter": "indent",


@ -9,8 +9,8 @@ from distutils.util import strtobool
from pip._vendor.six import string_types
from pip.configuration import Configuration
from pip.utils import get_terminal_size
from pip._internal.configuration import Configuration
from pip._internal.utils.misc import get_terminal_size
logger = logging.getLogger(__name__)

src/pip/_internal/cache.py Normal file

@ -0,0 +1,150 @@
"""Cache Management
"""
import errno
import hashlib
import logging
import os
from pip._vendor.packaging.utils import canonicalize_name
from pip._internal import index
from pip._internal.compat import expanduser
from pip._internal.download import path_to_url
from pip._internal.wheel import InvalidWheelFilename, Wheel
logger = logging.getLogger(__name__)
class Cache(object):
"""An abstract class - provides cache directories for data from links
:param cache_dir: The root of the cache.
:param format_control: A pip.index.FormatControl object to limit
binaries being read from the cache.
:param allowed_formats: which formats of files the cache should store.
('binary' and 'source' are the only allowed values)
"""
def __init__(self, cache_dir, format_control, allowed_formats):
super(Cache, self).__init__()
self.cache_dir = expanduser(cache_dir) if cache_dir else None
self.format_control = format_control
self.allowed_formats = allowed_formats
_valid_formats = {"source", "binary"}
assert self.allowed_formats.union(_valid_formats) == _valid_formats
def _get_cache_path_parts(self, link):
"""Get parts of part that must be os.path.joined with cache_dir
"""
# We want to generate an url to use as our cache key, we don't want to
# just re-use the URL because it might have other items in the fragment
# and we don't care about those.
key_parts = [link.url_without_fragment]
if link.hash_name is not None and link.hash is not None:
key_parts.append("=".join([link.hash_name, link.hash]))
key_url = "#".join(key_parts)
# Encode our key url with sha224, we'll use this because it has similar
# security properties to sha256, but with a shorter total output (and
# thus less secure). However the differences don't make a lot of
# difference for our use case here.
hashed = hashlib.sha224(key_url.encode()).hexdigest()
# We want to nest the directories some to prevent having a ton of top
# level directories where we might run out of sub directories on some
# FS.
parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]
return parts
def _get_candidates(self, link, package_name):
can_not_cache = (
not self.cache_dir or
not package_name or
not link
)
if can_not_cache:
return []
canonical_name = canonicalize_name(package_name)
formats = index.fmt_ctl_formats(
self.format_control, canonical_name
)
if not self.allowed_formats.intersection(formats):
return []
root = self.get_path_for_link(link)
try:
return os.listdir(root)
except OSError as err:
if err.errno in {errno.ENOENT, errno.ENOTDIR}:
return []
raise
def get_path_for_link(self, link):
"""Return a directory to store cached items in for link.
"""
raise NotImplementedError()
def get(self, link, package_name):
"""Returns a link to a cached item if it exists, otherwise returns the
passed link.
"""
raise NotImplementedError()
def _link_for_candidate(self, link, candidate):
root = self.get_path_for_link(link)
path = os.path.join(root, candidate)
return index.Link(path_to_url(path))
class WheelCache(Cache):
"""A cache of wheels for future installs.
"""
def __init__(self, cache_dir, format_control):
super(WheelCache, self).__init__(cache_dir, format_control, {"binary"})
def get_path_for_link(self, link):
"""Return a directory to store cached wheels for link
Because there may be many wheels built from any one sdist, we provide a directory
to cache them in, and then consult that directory when looking up
cache hits.
We only insert things into the cache if they have plausible version
numbers, so that we don't contaminate the cache with things that were
not unique. E.g. ./package might have dozens of installs done for it
and build a version of 0.0...and if we built and cached a wheel, we'd
end up using the same wheel even if the source has been edited.
:param link: The link of the sdist for which this will cache wheels.
"""
parts = self._get_cache_path_parts(link)
# Inside of the base location for cached wheels, expand our parts and
# join them all together.
return os.path.join(self.cache_dir, "wheels", *parts)
def get(self, link, package_name):
candidates = []
for wheel_name in self._get_candidates(link, package_name):
try:
wheel = Wheel(wheel_name)
except InvalidWheelFilename:
continue
if not wheel.supported():
# Built for a different python/arch/etc
continue
candidates.append((wheel.support_index_min(), wheel_name))
if not candidates:
return link
return self._link_for_candidate(link, min(candidates)[1])


@ -13,13 +13,17 @@ import warnings
from functools import partial
from optparse import SUPPRESS_HELP, Option, OptionGroup
from pip.index import (
from pip._internal.index import (
FormatControl, fmt_ctl_handle_mutual_exclude, fmt_ctl_no_binary
)
from pip.locations import USER_CACHE_DIR, src_prefix
from pip.models import PyPI
from pip.utils.hashes import STRONG_HASHES
from pip.utils.ui import BAR_TYPES
from pip._internal.locations import USER_CACHE_DIR, src_prefix
from pip._internal.models import PyPI
from pip._internal.utils.hashes import STRONG_HASHES
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.ui import BAR_TYPES
if MYPY_CHECK_RUNNING:
from typing import Any
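The MYPY_CHECK_RUNNING guard keeps `typing` imports out of the runtime path; annotations then go in `# type:` comments, which is what the `# type: Any` markers added throughout this file are. A minimal sketch of the pattern (the constant is assumed to mirror pip._internal.utils.typing; the function is illustrative, not from pip):

MYPY_CHECK_RUNNING = False  # False at runtime, treated as True by mypy

if MYPY_CHECK_RUNNING:
    # Imported only while type checking, so there is no runtime cost and
    # no hard runtime dependency on `typing`.
    from typing import Any, List

def make_options(count):
    # type: (int) -> List[Any]
    """Comment-style annotations keep the module Python 2 compatible."""
    return [object() for _ in range(count)]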
def make_option_group(group, parser):
@ -64,7 +68,8 @@ help_ = partial(
'-h', '--help',
dest='help',
action='help',
help='Show help.')
help='Show help.',
) # type: Any
isolated_mode = partial(
Option,
@ -85,7 +90,8 @@ require_virtualenv = partial(
dest='require_venv',
action='store_true',
default=False,
help=SUPPRESS_HELP)
help=SUPPRESS_HELP
) # type: Any
verbose = partial(
Option,
@ -101,7 +107,8 @@ version = partial(
'-V', '--version',
dest='version',
action='store_true',
help='Show version and exit.')
help='Show version and exit.',
) # type: Any
quiet = partial(
Option,
@ -109,10 +116,12 @@ quiet = partial(
dest='quiet',
action='count',
default=0,
help=('Give less output. Option is additive, and can be used up to 3'
' times (corresponding to WARNING, ERROR, and CRITICAL logging'
' levels).')
)
help=(
'Give less output. Option is additive, and can be used up to 3'
' times (corresponding to WARNING, ERROR, and CRITICAL logging'
' levels).'
),
) # type: Any
progress_bar = partial(
Option,
@ -121,8 +130,11 @@ progress_bar = partial(
type='choice',
choices=list(BAR_TYPES.keys()),
default='on',
help='Specify type of progress to be displayed [' +
'|'.join(BAR_TYPES.keys()) + '] (default: %default)')
help=(
'Specify type of progress to be displayed [' +
'|'.join(BAR_TYPES.keys()) + '] (default: %default)'
),
) # type: Any
log = partial(
Option,
@ -130,7 +142,7 @@ log = partial(
dest="log",
metavar="path",
help="Path to a verbose appending log."
)
) # type: Any
no_input = partial(
Option,
@ -139,7 +151,8 @@ no_input = partial(
dest='no_input',
action='store_true',
default=False,
help=SUPPRESS_HELP)
help=SUPPRESS_HELP
) # type: Any
proxy = partial(
Option,
@ -147,7 +160,8 @@ proxy = partial(
dest='proxy',
type='str',
default='',
help="Specify a proxy in the form [user:passwd@]proxy.server:port.")
help="Specify a proxy in the form [user:passwd@]proxy.server:port."
) # type: Any
retries = partial(
Option,
@ -156,7 +170,8 @@ retries = partial(
type='int',
default=5,
help="Maximum number of retries each connection should attempt "
"(default %default times).")
"(default %default times).",
) # type: Any
timeout = partial(
Option,
@ -165,7 +180,8 @@ timeout = partial(
dest='timeout',
type='float',
default=15,
help='Set the socket timeout (default %default seconds).')
help='Set the socket timeout (default %default seconds).',
) # type: Any
skip_requirements_regex = partial(
Option,
@ -174,7 +190,8 @@ skip_requirements_regex = partial(
dest='skip_requirements_regex',
type='str',
default='',
help=SUPPRESS_HELP)
help=SUPPRESS_HELP,
) # type: Any
def exists_action():
@ -188,7 +205,8 @@ def exists_action():
action='append',
metavar='action',
help="Default action when a path already exists: "
"(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.")
"(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort).",
)
cert = partial(
@ -197,7 +215,8 @@ cert = partial(
dest='cert',
type='str',
metavar='path',
help="Path to alternate CA bundle.")
help="Path to alternate CA bundle.",
) # type: Any
client_cert = partial(
Option,
@ -207,7 +226,8 @@ client_cert = partial(
default=None,
metavar='path',
help="Path to SSL client certificate, a single file containing the "
"private key and the certificate in PEM format.")
"private key and the certificate in PEM format.",
) # type: Any
index_url = partial(
Option,
@ -218,7 +238,8 @@ index_url = partial(
help="Base URL of Python Package Index (default %default). "
"This should point to a repository compliant with PEP 503 "
"(the simple repository API) or a local directory laid out "
"in the same format.")
"in the same format.",
) # type: Any
def extra_index_url():
@ -230,7 +251,7 @@ def extra_index_url():
default=[],
help="Extra URLs of package indexes to use in addition to "
"--index-url. Should follow the same rules as "
"--index-url."
"--index-url.",
)
@ -240,7 +261,8 @@ no_index = partial(
dest='no_index',
action='store_true',
default=False,
help='Ignore package index (only looking at --find-links URLs instead).')
help='Ignore package index (only looking at --find-links URLs instead).',
) # type: Any
def find_links():
@ -252,7 +274,8 @@ def find_links():
metavar='url',
help="If a url or path to an html file, then parse for links to "
"archives. If a local path or file:// url that's a directory, "
"then look for archives in the directory listing.")
"then look for archives in the directory listing.",
)
def trusted_host():
@ -275,7 +298,7 @@ process_dependency_links = partial(
action="store_true",
default=False,
help="Enable the processing of dependency links.",
)
) # type: Any
def constraints():
@ -286,7 +309,8 @@ def constraints():
default=[],
metavar='file',
help='Constrain versions using the given constraints file. '
'This option can be used multiple times.')
'This option can be used multiple times.'
)
def requirements():
@ -297,7 +321,8 @@ def requirements():
default=[],
metavar='file',
help='Install from the given requirements file. '
'This option can be used multiple times.')
'This option can be used multiple times.'
)
def editable():
@ -321,7 +346,7 @@ src = partial(
help='Directory to check out editable projects into. '
'The default in a virtualenv is "<venv path>/src". '
'The default for global installs is "<current dir>/src".'
)
) # type: Any
def _get_format_control(values, option):
@ -351,7 +376,8 @@ def no_binary():
"disable all binary packages, :none: to empty the set, or one or "
"more package names with commas between them. Note that some "
"packages are tricky to compile and may fail to install when "
"this option is used on them.")
"this option is used on them.",
)
def only_binary():
@ -364,7 +390,8 @@ def only_binary():
"disable all source packages, :none: to empty the set, or one or "
"more package names with commas between them. Packages without "
"binary distributions will fail to install when this option is "
"used on them.")
"used on them.",
)
cache_dir = partial(
@ -390,7 +417,8 @@ no_deps = partial(
dest='ignore_dependencies',
action='store_true',
default=False,
help="Don't install package dependencies.")
help="Don't install package dependencies)."
) # type: Any
build_dir = partial(
Option,
@ -398,14 +426,15 @@ build_dir = partial(
dest='build_dir',
metavar='dir',
help='Directory to unpack packages into and build in.'
)
) # type: Any
ignore_requires_python = partial(
Option,
'--ignore-requires-python',
dest='ignore_requires_python',
action='store_true',
help='Ignore the Requires-Python information.')
help='Ignore the Requires-Python information.'
) # type: Any
install_options = partial(
Option,
@ -417,7 +446,8 @@ install_options = partial(
"command (use like --install-option=\"--install-scripts=/usr/local/"
"bin\"). Use multiple --install-option options to pass multiple "
"options to setup.py install. If you are using an option with a "
"directory path, be sure to use absolute path.")
"directory path, be sure to use absolute path.",
) # type: Any
global_options = partial(
Option,
@ -426,14 +456,16 @@ global_options = partial(
action='append',
metavar='options',
help="Extra global options to be supplied to the setup.py "
"call before the install command.")
"call before the install command.",
) # type: Any
no_clean = partial(
Option,
'--no-clean',
action='store_true',
default=False,
help="Don't clean up build directories.")
help="Don't clean up build directories)."
) # type: Any
pre = partial(
Option,
@ -441,7 +473,8 @@ pre = partial(
action='store_true',
default=False,
help="Include pre-release and development versions. By default, "
"pip only finds stable versions.")
"pip only finds stable versions.",
) # type: Any
disable_pip_version_check = partial(
Option,
@ -450,7 +483,8 @@ disable_pip_version_check = partial(
action="store_true",
default=False,
help="Don't periodically check PyPI to determine whether a new version "
"of pip is available for download. Implied with --no-index.")
"of pip is available for download. Implied with --no-index.",
) # type: Any
# Deprecated, Remove later
@ -460,7 +494,7 @@ always_unzip = partial(
dest='always_unzip',
action='store_true',
help=SUPPRESS_HELP,
)
) # type: Any
def _merge_hash(option, opt_str, value, parser):
@ -490,7 +524,8 @@ hash = partial(
callback=_merge_hash,
type='string',
help="Verify that the package's archive matches this "
'hash before installing. Example: --hash=sha256:abcdef...')
'hash before installing. Example: --hash=sha256:abcdef...',
) # type: Any
require_hashes = partial(
@ -501,7 +536,8 @@ require_hashes = partial(
default=False,
help='Require a hash to check each requirement against, for '
'repeatable installs. This option is implied when any package in a '
'requirements file has a --hash option.')
'requirements file has a --hash option.',
) # type: Any
##########


@ -3,19 +3,25 @@ Package containing all pip commands
"""
from __future__ import absolute_import
from pip.commands.completion import CompletionCommand
from pip.commands.configuration import ConfigurationCommand
from pip.commands.download import DownloadCommand
from pip.commands.freeze import FreezeCommand
from pip.commands.hash import HashCommand
from pip.commands.help import HelpCommand
from pip.commands.list import ListCommand
from pip.commands.check import CheckCommand
from pip.commands.search import SearchCommand
from pip.commands.show import ShowCommand
from pip.commands.install import InstallCommand
from pip.commands.uninstall import UninstallCommand
from pip.commands.wheel import WheelCommand
from pip._internal.commands.completion import CompletionCommand
from pip._internal.commands.configuration import ConfigurationCommand
from pip._internal.commands.download import DownloadCommand
from pip._internal.commands.freeze import FreezeCommand
from pip._internal.commands.hash import HashCommand
from pip._internal.commands.help import HelpCommand
from pip._internal.commands.list import ListCommand
from pip._internal.commands.check import CheckCommand
from pip._internal.commands.search import SearchCommand
from pip._internal.commands.show import ShowCommand
from pip._internal.commands.install import InstallCommand
from pip._internal.commands.uninstall import UninstallCommand
from pip._internal.commands.wheel import WheelCommand
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import List, Type
from pip._internal.basecommand import Command
commands_order = [
InstallCommand,
@ -31,7 +37,7 @@ commands_order = [
HashCommand,
CompletionCommand,
HelpCommand,
]
] # type: List[Type[Command]]
commands_dict = {c.name: c for c in commands_order}
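The list/dict pair above forms a small command registry: classes are kept in display order and also keyed by their `name` attribute for lookup. A minimal sketch of the same pattern with made-up command classes (not pip's actual commands):

class InstallLikeCommand:
    name = "install"

class ListLikeCommand:
    name = "list"

commands_order = [InstallLikeCommand, ListLikeCommand]  # display order
commands_dict = {c.name: c for c in commands_order}     # lookup by name

def lookup(command_name):
    # Dispatch on the user-supplied command name, e.g. lookup("install").
    return commands_dict[command_name]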


@ -1,8 +1,8 @@
import logging
from pip.basecommand import Command
from pip.operations.check import check_requirements
from pip.utils import get_installed_distributions
from pip._internal.basecommand import Command
from pip._internal.operations.check import check_requirements
from pip._internal.utils.misc import get_installed_distributions
logger = logging.getLogger(__name__)


@ -3,7 +3,8 @@ from __future__ import absolute_import
import sys
import textwrap
from pip.basecommand import Command
from pip._internal.basecommand import Command
from pip._internal.utils.misc import get_prog
BASE_COMPLETION = """
# pip %(shell)s completion start%(script)s# pip %(shell)s completion end
@ -17,7 +18,7 @@ COMPLETION_SCRIPTS = {
COMP_CWORD=$COMP_CWORD \\
PIP_AUTO_COMPLETE=1 $1 ) )
}
complete -o default -F _pip_completion pip
complete -o default -F _pip_completion %(prog)s
""",
'zsh': """
function _pip_completion {
@ -28,17 +29,19 @@ COMPLETION_SCRIPTS = {
COMP_CWORD=$(( cword-1 )) \\
PIP_AUTO_COMPLETE=1 $words[1] ) )
}
compctl -K _pip_completion pip
compctl -K _pip_completion %(prog)s
""",
'fish': """
function __fish_complete_pip
set -lx COMP_WORDS (commandline -o) ""
set -lx COMP_CWORD {cword}
set -lx COMP_CWORD ( \\
math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\
)
set -lx PIP_AUTO_COMPLETE 1
string split \\ -- (eval $COMP_WORDS[1])
end
complete -fa "(__fish_complete_pip)" -c pip
""".format(cword="(math (contains -i -- (commandline -t) $COMP_WORDS)-1)")
complete -fa "(__fish_complete_pip)" -c %(prog)s
""",
}
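These templates use %(prog)s placeholders; the hunk that follows fills them in via get_prog(), so completions register under whatever name pip was invoked as. A minimal, self-contained sketch of that substitution (the template and helper name here are trimmed down and made up, not pip's real ones):

COMPLETION_TEMPLATES = {
    'bash': 'complete -o default -F _pip_completion %(prog)s\n',
}

def render_completion(shell, prog):
    # Fill the %(prog)s placeholder, falling back to an empty script for
    # unknown shells (mirroring the .get(..., '') call below).
    return COMPLETION_TEMPLATES.get(shell, '') % {'prog': prog}

print(render_completion('bash', 'pip'), end='')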
@ -80,7 +83,9 @@ class CompletionCommand(Command):
shell_options = ['--' + shell for shell in sorted(shells)]
if options.shell in shells:
script = textwrap.dedent(
COMPLETION_SCRIPTS.get(options.shell, '')
COMPLETION_SCRIPTS.get(options.shell, '') % {
'prog': get_prog(),
}
)
print(BASE_COMPLETION % {'script': script, 'shell': options.shell})
else:


@ -2,12 +2,12 @@ import logging
import os
import subprocess
from pip.basecommand import Command
from pip.configuration import Configuration, kinds
from pip.exceptions import PipError
from pip.locations import venv_config_file
from pip.status_codes import ERROR, SUCCESS
from pip.utils import get_prog
from pip._internal.basecommand import Command
from pip._internal.configuration import Configuration, kinds
from pip._internal.exceptions import PipError
from pip._internal.locations import venv_config_file
from pip._internal.status_codes import ERROR, SUCCESS
from pip._internal.utils.misc import get_prog
logger = logging.getLogger(__name__)


@ -3,16 +3,16 @@ from __future__ import absolute_import
import logging
import os
from pip import cmdoptions
from pip.basecommand import RequirementCommand
from pip.exceptions import CommandError
from pip.index import FormatControl
from pip.operations.prepare import RequirementPreparer
from pip.req import RequirementSet
from pip.resolve import Resolver
from pip.utils import ensure_dir, normalize_path
from pip.utils.filesystem import check_path_owner
from pip.utils.temp_dir import TempDirectory
from pip._internal import cmdoptions
from pip._internal.basecommand import RequirementCommand
from pip._internal.exceptions import CommandError
from pip._internal.index import FormatControl
from pip._internal.operations.prepare import RequirementPreparer
from pip._internal.req import RequirementSet
from pip._internal.resolve import Resolver
from pip._internal.utils.filesystem import check_path_owner
from pip._internal.utils.misc import ensure_dir, normalize_path
from pip._internal.utils.temp_dir import TempDirectory
logger = logging.getLogger(__name__)


@ -2,11 +2,11 @@ from __future__ import absolute_import
import sys
import pip
from pip.basecommand import Command
from pip.cache import WheelCache
from pip.compat import stdlib_pkgs
from pip.operations.freeze import freeze
from pip._internal import index
from pip._internal.basecommand import Command
from pip._internal.cache import WheelCache
from pip._internal.compat import stdlib_pkgs
from pip._internal.operations.freeze import freeze
DEV_PKGS = {'pip', 'setuptools', 'distribute', 'wheel'}
@ -71,7 +71,7 @@ class FreezeCommand(Command):
self.parser.insert_option_group(0, self.cmd_opts)
def run(self, options, args):
format_control = pip.index.FormatControl(set(), set())
format_control = index.FormatControl(set(), set())
wheel_cache = WheelCache(options.cache_dir, format_control)
skip = set(stdlib_pkgs)
if not options.freeze_all:


@ -4,10 +4,10 @@ import hashlib
import logging
import sys
from pip.basecommand import Command
from pip.status_codes import ERROR
from pip.utils import read_chunks
from pip.utils.hashes import FAVORITE_HASH, STRONG_HASHES
from pip._internal.basecommand import Command
from pip._internal.status_codes import ERROR
from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES
from pip._internal.utils.misc import read_chunks
logger = logging.getLogger(__name__)


@ -1,7 +1,7 @@
from __future__ import absolute_import
from pip.basecommand import SUCCESS, Command
from pip.exceptions import CommandError
from pip._internal.basecommand import SUCCESS, Command
from pip._internal.exceptions import CommandError
class HelpCommand(Command):
@ -13,7 +13,7 @@ class HelpCommand(Command):
ignore_require_venv = True
def run(self, options, args):
from pip.commands import commands_dict, get_similar_commands
from pip._internal.commands import commands_dict, get_similar_commands
try:
# 'pip help' with no args is handled by pip.__init__.parseopt()


@ -6,27 +6,21 @@ import operator
import os
import shutil
from pip import cmdoptions
from pip.basecommand import RequirementCommand
from pip.cache import WheelCache
from pip.exceptions import (
from pip._internal import cmdoptions
from pip._internal.basecommand import RequirementCommand
from pip._internal.cache import WheelCache
from pip._internal.exceptions import (
CommandError, InstallationError, PreviousBuildDirError
)
from pip.locations import distutils_scheme, virtualenv_no_global
from pip.operations.prepare import RequirementPreparer
from pip.req import RequirementSet
from pip.resolve import Resolver
from pip.status_codes import ERROR
from pip.utils import ensure_dir, get_installed_version
from pip.utils.filesystem import check_path_owner
from pip.utils.temp_dir import TempDirectory
from pip.wheel import WheelBuilder
try:
import wheel
except ImportError:
wheel = None
from pip._internal.locations import distutils_scheme, virtualenv_no_global
from pip._internal.operations.prepare import RequirementPreparer
from pip._internal.req import RequirementSet
from pip._internal.resolve import Resolver
from pip._internal.status_codes import ERROR
from pip._internal.utils.filesystem import check_path_owner
from pip._internal.utils.misc import ensure_dir, get_installed_version
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.wheel import WheelBuilder
try:
import wheel
@ -135,8 +129,8 @@ class InstallCommand(RequirementCommand):
'--force-reinstall',
dest='force_reinstall',
action='store_true',
help='When upgrading, reinstall all packages even if they are '
'already up-to-date.')
help='Reinstall all packages even if they are already '
'up-to-date.')
cmd_opts.add_option(
'-I', '--ignore-installed',
@ -164,6 +158,14 @@ class InstallCommand(RequirementCommand):
help="Do not compile Python source files to bytecode",
)
cmd_opts.add_option(
"--no-warn-script-location",
action="store_false",
dest="warn_script_location",
default=True,
help="Do not warn when installing scripts outside PATH",
)
cmd_opts.add_option(cmdoptions.no_binary())
cmd_opts.add_option(cmdoptions.only_binary())
cmd_opts.add_option(cmdoptions.no_clean())
@ -291,11 +293,12 @@ class InstallCommand(RequirementCommand):
# installed from the sdist/vcs whatever.
wb.build(session=session, autobuilding=True)
requirement_set.install(
installed = requirement_set.install(
install_options,
global_options,
root=options.root_path,
prefix=options.prefix_path,
warn_script_location=options.warn_script_location,
)
possible_lib_locations = get_lib_location_guesses(
@ -305,9 +308,7 @@ class InstallCommand(RequirementCommand):
prefix=options.prefix_path,
isolated=options.isolated_mode,
)
reqs = sorted(
requirement_set.successfully_installed,
key=operator.attrgetter('name'))
reqs = sorted(installed, key=operator.attrgetter('name'))
items = []
for req in reqs:
item = req.name

Some files were not shown because too many files have changed in this diff.