Merge branch 'master' into python-3.7

This commit is contained in:
Pradyun Gedam 2018-07-24 23:08:09 +05:30 committed by GitHub
commit b3473b329d
213 changed files with 4173 additions and 3278 deletions

View File

@ -6,5 +6,5 @@ require either a news file fragment or a marker to indicate they don't require
one.
To read more about adding a news file fragment for your PR, please check out
our documentation at: https://pip.pypa.io/en/latest/development/#adding-a-news-entry
our documentation at: https://pip.pypa.io/en/latest/development/contributing/#adding-a-news-entry
-->

View File

@ -1,16 +1,20 @@
Adam Chainz <adam@adamj.eu>
Adam Wentz <awentz@theonion.com>
Adrien Morison <adrien.morison@gmail.com>
Alan Yee <alyee@ucsd.edu>
Aleks Bunin <github@compuix.com>
Alex Gaynor <alex.gaynor@gmail.com>
Alex Grönholm <alex.gronholm@nextday.fi>
Alex Morega <alex@grep.ro>
Alex Stachowiak <alexander@computer.org>
Alexander Shtyrov <rawzausho@gmail.com>
Alexandre Conrad <alexandre.conrad@gmail.com>
Alli <alzeih@users.noreply.github.com>
Anatoly Techtonik <techtonik@gmail.com>
Andrei Geacar <andrei.geacar@gmail.com>
Andrey Bulgakov <mail@andreiko.ru>
Andrés Delfino <34587441+andresdelfino@users.noreply.github.com>
Andrés Delfino <adelfino@gmail.com>
Andy Freeland <andy.freeland@redjack.com>
Andy Kluger <AndydeCleyre@users.noreply.github.com>
Anish Tambe <anish.tambe@yahoo.in>
@ -33,6 +37,7 @@ Atsushi Odagiri <aodagx@gmail.com>
Avner Cohen <israbirding@gmail.com>
Baptiste Mispelon <bmispelon@gmail.com>
Bartek Ogryczak <b.ogryczak@gmail.com>
Bastian Venthur <mail@venthur.de>
Ben Darnell <ben@bendarnell.com>
Ben Hoyt <benhoyt@gmail.com>
Ben Rosser <rosser.bjr@gmail.com>
@ -45,6 +50,7 @@ Bogdan Opanchuk <bogdan@opanchuk.net>
Brad Erickson <eosrei@gmail.com>
Bradley Ayers <bradley.ayers@gmail.com>
Brandon L. Reiss <brandon@damyata.co>
Brett Randall <javabrett@gmail.com>
Brian Rosner <brosner@gmail.com>
BrownTruck <BrownTruck@users.noreply.github.com>
Bruno Oliveira <nicoddemus@gmail.com>
@ -81,6 +87,7 @@ Craig Kerstiens <craig.kerstiens@gmail.com>
Cristian Sorinel <cristian.sorinel@gmail.com>
Curtis Doty <Curtis@GreenKey.net>
Damian Quiroga <qdamian@gmail.com>
Dan Black <dyspop@gmail.com>
Dan Savilonis <djs@n-cube.org>
Dan Sully <daniel-github@electricrain.com>
daniel <mcdonaldd@unimelb.edu.au>
@ -88,7 +95,9 @@ Daniel Collins <accounts@dac.io>
Daniel Hahler <git@thequod.de>
Daniel Holth <dholth@fastmail.fm>
Daniel Jost <torpedojost@gmail.com>
Daniel Shaulov <daniel.shaulov@gmail.com>
Daniele Procida <daniele@vurt.org>
Danny Hermes <daniel.j.hermes@gmail.com>
Dav Clark <davclark@gmail.com>
Dave Abrahams <dave@boostpro.com>
David Aguilar <davvid@gmail.com>
@ -106,10 +115,12 @@ Domen Kožar <domen@dev.si>
Donald Stufft <donald@stufft.io>
Dongweiming <dongweiming@admaster.com.cn>
Douglas Thor <dougthor42@users.noreply.github.com>
DrFeathers <WilliamGeorgeBurgess@gmail.com>
Dustin Ingram <di@di.codes>
Dwayne Bailey <dwayne@translate.org.za>
Ed Morley <501702+edmorley@users.noreply.github.com>
Ed Morley <emorley@mozilla.com>
Eli Schwartz <eschwartz93@gmail.com>
Emil Styrke <emil.styrke@gmail.com>
Endoh Takanao <djmchl@gmail.com>
enoch <lanxenet@gmail.com>
@ -118,6 +129,8 @@ Eric Hanchrow <eric.hanchrow@gmail.com>
Eric Hopper <hopper@omnifarious.org>
Erik M. Bray <embray@stsci.edu>
Erik Rose <erik@mozilla.com>
Ernest W Durbin III <ewdurbin@gmail.com>
Ernest W. Durbin III <ewdurbin@gmail.com>
Erwin Janssen <erwinjanssen@outlook.com>
Eugene Vereshchagin <evvers@gmail.com>
fiber-space <fiber-space@users.noreply.github.com>
@ -135,6 +148,7 @@ George Song <george@55minutes.com>
Georgi Valkov <georgi.t.valkov@gmail.com>
Giftlin Rajaiah <giftlin.rgn@gmail.com>
gizmoguy1 <gizmoguy1@gmail.com>
gkdoc <40815324+gkdoc@users.noreply.github.com>
GOTO Hayato <3532528+gh640@users.noreply.github.com>
Guilherme Espada <porcariadagata@gmail.com>
Guy Rozendorn <guy@rzn.co.il>
@ -171,6 +185,7 @@ Jay Graves <jay@skabber.com>
Jeff Barber <jbarber@computer.org>
Jeff Dairiki <dairiki@dairiki.org>
Jeremy Stanley <fungi@yuggoth.org>
Jeremy Zafran <jzafran@users.noreply.github.com>
Jim Garrison <jim@garrison.cc>
Jivan Amara <Development@JivanAmara.net>
John-Scott Atlakson <john.scott.atlakson@gmail.com>
@ -190,6 +205,7 @@ jwg4 <jack.grahl@yahoo.co.uk>
Jyrki Pulliainen <jyrki@spotify.com>
Kamal Bin Mustafa <kamal@smach.net>
kaustav haldar <hi@kaustav.me>
keanemind <keanemind@gmail.com>
Kelsey Hightower <kelsey.hightower@gmail.com>
Kenneth Belitzky <kenny@belitzky.com>
Kenneth Reitz <me@kennethreitz.com>
@ -197,6 +213,7 @@ Kenneth Reitz <me@kennethreitz.org>
Kevin Burke <kev@inburke.com>
Kevin Carter <kevin.carter@rackspace.com>
Kevin Frommelt <kevin.frommelt@webfilings.com>
Kexuan Sun <me@kianasun.com>
Kit Randel <kit@nocturne.net.nz>
kpinc <kop@meme.com>
Kumar McMillan <kumar.mcmillan@gmail.com>
@ -246,16 +263,19 @@ Miguel Araujo Perez <miguel.araujo.perez@gmail.com>
Mihir Singh <git.service@mihirsingh.com>
Min RK <benjaminrk@gmail.com>
MinRK <benjaminrk@gmail.com>
Miro Hrončok <miro@hroncok.cz>
montefra <franz.bergesund@gmail.com>
Monty Taylor <mordred@inaugust.com>
Nate Coraor <nate@bx.psu.edu>
Nathaniel J. Smith <njs@pobox.com>
Nehal J Wani <nehaljw.kkd1@gmail.com>
Nick Coghlan <ncoghlan@gmail.com>
Nick Stenning <nick@whiteink.com>
Nikhil Benesch <nikhil.benesch@gmail.com>
Nowell Strite <nowell@strite.org>
nvdv <modestdev@gmail.com>
Ofekmeister <ofekmeister@gmail.com>
Oliver Jeeves <oliver.jeeves@ocado.com>
Oliver Tonnhofer <olt@bogosoft.com>
Olivier Girardot <ssaboum@gmail.com>
Olivier Grisel <olivier.grisel@ensta.org>
@ -281,6 +301,7 @@ Phaneendra Chiruvella <hi@pcx.io>
Phil Freo <phil@philfreo.com>
Phil Pennock <phil@pennock-tech.com>
Phil Whelan <phil123@gmail.com>
Philip Molloy <pamolloy@users.noreply.github.com>
Philippe Ombredanne <pombredanne@gmail.com>
Pi Delport <pjdelport@gmail.com>
Pierre-Yves Rofes <github@rofes.fr>
@ -323,6 +344,7 @@ Sebastian Schaetz <sschaetz@butterflynetinc.com>
Segev Finer <segev208@gmail.com>
Sergey Vasilyev <nolar@nolar.info>
Seth Woodworth <seth@sethish.com>
Shlomi Fish <shlomif@shlomifish.org>
Simeon Visser <svisser@users.noreply.github.com>
Simon Cross <hodgestar@gmail.com>
Simon Pichugin <simon.pichugin@gmail.com>
@ -340,6 +362,7 @@ Stéphane Bidoul (ACSONE) <stephane.bidoul@acsone.eu>
Stéphane Bidoul <stephane.bidoul@acsone.eu>
Stéphane Klein <contact@stephane-klein.info>
Takayuki SHIMIZUKAWA <shimizukawa@gmail.com>
Thijs Triemstra <info@collab.nl>
Thomas Fenzl <thomas.fenzl@gmail.com>
Thomas Grainger <tagrain@gmail.com>
Thomas Guettler <tguettler@tbz-pariv.de>
@ -351,6 +374,7 @@ Tim Harder <radhermit@gmail.com>
Tim Heap <tim@timheap.me>
tim smith <github@tim-smith.us>
tinruufu <tinruufu@gmail.com>
Tom Freudenheim <tom.freudenheim@onepeloton.com>
Tom V <tom@viner.tv>
Tomer Chachamu <tomer.chachamu@gmail.com>
Tony Zhaocheng Tan <tony@tonytan.io>
@ -380,4 +404,5 @@ Zearin <zearin@gonk.net>
Zearin <Zearin@users.noreply.github.com>
Zhiping Deng <kofreestyler@gmail.com>
Zvezdan Petkovic <zpetkovic@acm.org>
Łukasz Langa <lukasz@langa.pl>
Семён Марьясин <simeon@maryasin.name>

View File

@ -7,6 +7,83 @@
.. towncrier release notes start
18.0 (2018-07-22)
=================
Process
-------
- Switch to a Calendar based versioning scheme.
- Formally document our deprecation process as a minimum of 6 months of deprecation
warnings.
- Adopt and document NEWS fragment writing style.
- Switch to releasing a new, non-bug-fix version of pip every 3 months.
Deprecations and Removals
-------------------------
- Remove the legacy format from pip list. (#3651, #3654)
- Dropped support for Python 3.3. (#3796)
- Remove support for cleaning up #egg fragment postfixes. (#4174)
- Remove the shim for the old get-pip.py location. (#5520)
For the past 2 years, it's only been redirecting users to use the newer
https://bootstrap.pypa.io/get-pip.py location.
Features
--------
- Introduce a new --prefer-binary flag, to prefer older wheels over newer source packages. (#3785)
- Improve autocompletion function on file name completion after options
which have ``<file>``, ``<dir>`` or ``<path>`` as metavar. (#4842, #5125)
- Add support for installing PEP 518 build dependencies from source. (#5229)
- Improve status message when upgrade is skipped due to only-if-needed strategy. (#5319)
Bug Fixes
---------
- Update pip's self-check logic to not use a virtualenv specific file and honor cache-dir. (#3905)
- Remove compiled pyo files for wheel packages. (#4471)
- Speed up printing of newly installed package versions. (#5127)
- Restrict install time dependency warnings to directly-dependent packages. (#5196, #5457)
Warning about the entire package set has resulted in users getting confused as
to why pip is printing these warnings.
- Improve handling of PEP 518 build requirements: support environment markers and extras. (#5230, #5265)
- Remove username/password from log message when using index with basic auth. (#5249)
- Remove trailing os.sep from PATH directories to avoid false negatives. (#5293)
- Fix "pip wheel pip" being blocked by the "don't use pip to modify itself" check. (#5311, #5312)
- Disable pip's version check (and upgrade message) when installed by a different package manager. (#5346)
This works better with Linux distributions where pip's upgrade message may
result in users running pip in a manner that modifies files that should be
managed by the OS's package manager.
- Check for file existence and unlink first when clobbering existing files during a wheel install. (#5366)
- Improve the error message to be more specific when no files are found as listed in PKG-INFO. (#5381)
- Always read ``pyproject.toml`` as UTF-8. This fixes Unicode handling on Windows and Python 2. (#5482)
- Fix a crash that occurs when PATH is not set while generating the script location warning. (#5558)
- Disallow packages with ``pyproject.toml`` files that have an empty build-system table. (#5627)
Vendored Libraries
------------------
- Update CacheControl to 0.12.5.
- Update certifi to 2018.4.16.
- Update distro to 1.3.0.
- Update idna to 2.7.
- Update ipaddress to 1.0.22.
- Update pkg_resources to 39.2.0 (via setuptools).
- Update progress to 1.4.
- Update pytoml to 0.1.16.
- Update requests to 2.19.1.
- Update urllib3 to 1.23.
Improved Documentation
----------------------
- Document how to use pip with a proxy server. (#512, #5574)
- Document that the output of pip show is in RFC-compliant mail header format. (#5261)
10.0.1 (2018-04-19)
===================

View File

@ -28,10 +28,37 @@ cache:
- '%LOCALAPPDATA%\pip\Cache'
test_script:
# Shorten paths, workaround https://bugs.python.org/issue18199
- "subst T: %TEMP%"
- "set TEMP=T:\\"
- "set TMP=T:\\"
- "tox -e py -- -m unit -n 3"
- "if \"%RUN_INTEGRATION_TESTS%\" == \"True\" (
tox -e py -- -m integration -n 3 --duration=5 )"
- ps: |
function should_run_tests {
if ("$env:APPVEYOR_PULL_REQUEST_NUMBER" -eq "") {
Write-Host "Not a pull request - running tests"
return $true
}
Write-Host "Pull request $env:APPVEYOR_PULL_REQUEST_NUMBER based on branch $env:APPVEYOR_REPO_BRANCH"
git fetch -q origin +refs/heads/$env:APPVEYOR_REPO_BRANCH
$changes = (git diff --name-only HEAD (git merge-base HEAD FETCH_HEAD))
Write-Host "Files changed:"
Write-Host $changes
$important = $changes | Where-Object { $_ -NotLike "*.rst" } |
Where-Object { $_ -NotLike "docs*" } |
Where-Object { $_ -NotLike "news*" } |
Where-Object { $_ -NotLike ".github*" }
if (!$important) {
Write-Host "Only documentation changes - skipping tests"
return $false
}
Write-Host "Pull request $env:APPVEYOR_PULL_REQUEST_NUMBER alters code - running tests"
return $true
}
if (should_run_tests) {
# Shorten paths, workaround https://bugs.python.org/issue18199
subst T: $env:TEMP
$env:TEMP = "T:\"
$env:TMP = "T:\"
tox -e py -- -m unit -n 3
if ($env:RUN_INTEGRATION_TESTS -eq "True") {
tox -e py -- -m integration -n 3 --duration=5
}
}

View File

@ -1,177 +0,0 @@
# Makefile for Sphinx documentation
#
# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = build
# User-friendly check for sphinx-build
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
endif
# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
help:
@echo "Please use \`make <target>' where <target> is one of"
@echo " html to make standalone HTML files"
@echo " dirhtml to make HTML files named index.html in directories"
@echo " singlehtml to make a single large HTML file"
@echo " pickle to make pickle files"
@echo " json to make JSON files"
@echo " htmlhelp to make HTML files and a HTML help project"
@echo " qthelp to make HTML files and a qthelp project"
@echo " devhelp to make HTML files and a Devhelp project"
@echo " epub to make an epub"
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
@echo " latexpdf to make LaTeX files and run them through pdflatex"
@echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
@echo " text to make text files"
@echo " man to make manual pages"
@echo " texinfo to make Texinfo files"
@echo " info to make Texinfo files and run them through makeinfo"
@echo " gettext to make PO message catalogs"
@echo " changes to make an overview of all changed/added/deprecated items"
@echo " xml to make Docutils-native XML files"
@echo " pseudoxml to make pseudoxml-XML files for display purposes"
@echo " linkcheck to check all external links for integrity"
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
clean:
rm -rf $(BUILDDIR)/*
html:
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
dirhtml:
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
singlehtml:
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
@echo
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
pickle:
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
@echo
@echo "Build finished; now you can process the pickle files."
json:
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
@echo
@echo "Build finished; now you can process the JSON files."
htmlhelp:
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
@echo
@echo "Build finished; now you can run HTML Help Workshop with the" \
".hhp project file in $(BUILDDIR)/htmlhelp."
qthelp:
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
@echo
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/pip-installer.qhcp"
@echo "To view the help file:"
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/pip-installer.qhc"
devhelp:
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
@echo
@echo "Build finished."
@echo "To view the help file:"
@echo "# mkdir -p $$HOME/.local/share/devhelp/pip-installer"
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/pip-installer"
@echo "# devhelp"
epub:
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
@echo
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
latex:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
@echo "Run \`make' in that directory to run these through (pdf)latex" \
"(use \`make latexpdf' here to do that automatically)."
latexpdf:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through pdflatex..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
latexpdfja:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through platex and dvipdfmx..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
text:
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
@echo
@echo "Build finished. The text files are in $(BUILDDIR)/text."
man:
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
@echo
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
texinfo:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
@echo "Run \`make' in that directory to run these through makeinfo" \
"(use \`make info' here to do that automatically)."
info:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo "Running Texinfo files through makeinfo..."
make -C $(BUILDDIR)/texinfo info
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
gettext:
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
@echo
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
changes:
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
@echo
@echo "The overview file is in $(BUILDDIR)/changes."
linkcheck:
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
@echo
@echo "Link check complete; look for any errors in the above output " \
"or in $(BUILDDIR)/linkcheck/output.txt."
doctest:
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
@echo "Testing of doctests in the sources finished, look at the " \
"results in $(BUILDDIR)/doctest/output.txt."
xml:
$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
@echo
@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
pseudoxml:
$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
@echo
@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."

View File

@ -11,10 +11,10 @@
# All configuration values have a default; values that are commented out
# serve to show the default.
import glob
import os
import re
import sys
import glob
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
@ -127,6 +127,7 @@ pygments_style = 'sphinx'
extlinks = {
'issue': ('https://github.com/pypa/pip/issues/%s', '#'),
'pull': ('https://github.com/pypa/pip/pull/%s', 'PR #'),
'pypi': ('https://pypi.org/project/%s', ''),
}
# -- Options for HTML output --------------------------------------------------

View File

@ -1,265 +0,0 @@
===========
Development
===========
Pull Requests
=============
- Submit Pull Requests against the `master` branch.
- Provide a good description of what you're doing and why.
- Provide tests that cover your changes and try to run the tests locally first.
**Example**. Assuming you have set up a GitHub account, forked the pip repository
from https://github.com/pypa/pip via the web interface, and your
fork is located at https://github.com/yourname/pip
::
$ git clone git@github.com:pypa/pip.git
$ cd pip
# ...
$ git diff
$ git add <modified> ...
$ git status
$ git commit
You may reference relevant issues in commit messages (like #1259) to
make GitHub link issues and commits together, and with a phrase like
"fixes #1259" you can even close relevant issues automatically. Now
push the changes to your fork::
$ git push git@github.com:yourname/pip.git
Open Pull Requests page at https://github.com/yourname/pip/pulls and
click "New pull request" and select your fork. That's it.
Pull requests should be self-contained, and limited in scope. Before being
merged, a pull request must be reviewed, and keeping individual PRs limited
in scope makes this far easier. In particular, pull requests must not be
treated as "feature branches", with ongoing development work happening
within the PR. Instead, the feature should be broken up into smaller,
independent parts which can be reviewed and merged individually.
When creating a pull request, avoid including "cosmetic" changes to
code that is unrelated to your change, as these make reviewing the PR
more difficult. Examples include re-flowing text in comments or
documentation, or addition or removal of blank lines or whitespace
within lines. Such changes can be made separately, as a "formatting
cleanup" PR, if needed.
Automated Testing
=================
All pull requests and merges to the 'master' branch are tested using `Travis CI`_
and `Appveyor CI`_ based on our `.travis.yml`_ and `appveyor.yml`_ files.
You can find the status and results of the CI runs for your PR on GitHub's Web
UI for the pull request. You can also find links to the CI services' pages for
the specific builds in the form of "Details" links, in case the CI run fails
and you wish to view the output.
To trigger CI to run again for a pull request, you can close and open the pull
request or submit another change to the pull request. If needed, project
maintainers can manually trigger a restart of a job/build.
Running tests
=============
OS Requirements: subversion, bazaar, git, and mercurial.
Python Requirements: tox or install all packages listed in
`tools/test-requirements.txt`_
Ways to run the tests locally::
$ tox -e py36 # The preferred way to run the tests, can use pyNN to
# run for a particular version or leave off the -e to
# run for all versions.
$ python setup.py test # Using the setuptools test plugin
$ py.test # Using py.test directly
$ tox # Using tox against pip's tox.ini
If you are missing one of the VCS tools, you can tell ``py.test`` to skip it::
# When using tox
$ tox -e py36 -- -k 'not svn'
$ tox -e py36 -- -k 'not (svn or git)'
# Directly with py.test
$ py.test -k 'not svn'
$ py.test -k 'not (svn or git)'
Getting Involved
================
The pip project welcomes help in the following ways:
- Making Pull Requests for code, tests, or docs.
- Commenting on open issues and pull requests.
- Helping to answer questions on the `mailing list`_.
If you want to become an official maintainer, start by helping out.
Later, when you think you're ready, get in touch with one of the maintainers,
and they will initiate a vote.
Adding a NEWS Entry
===================
The ``NEWS.rst`` file is managed using `towncrier`_ and all non-trivial changes
must be accompanied by a news entry.
To add an entry to the news file, first you need to have created an issue
describing the change you want to make. A Pull Request itself *may* function as
such, but it is preferred to have a dedicated issue (for example, in case the
PR ends up rejected due to code quality reasons).
Once you have an issue or pull request, take its number and create a
file inside of the ``news/`` directory named after that issue number with an
extension of ``removal``, ``feature``, ``bugfix``, or ``doc``. Thus if your
issue or PR number is ``1234`` and this change is fixing a bug, then you would
create a file ``news/1234.bugfix``. PRs can span multiple categories by creating
multiple files (for instance, if you added a feature and deprecated/removed the
old feature at the same time, you would create ``news/NNNN.feature`` and
``news/NNNN.removal``). Likewise if a PR touches multiple issues/PRs you may
create a file for each of them with the exact same contents and Towncrier will
deduplicate them.
The contents of this file are reStructuredText formatted text that will be used
as the content of the news file entry. You do not need to reference the issue
or PR numbers here as towncrier will automatically add a reference to all of
the affected issues when rendering the news file.
In order to maintain a consistent style in the ``NEWS.rst`` file, it is
preferred to keep the news entry to the point, in sentence case, shorter than
80 characters and in an imperative tone -- an entry should complete the sentence
"This change will ...". In rare cases, where one line is not enough, use a
summary line in an imperative tone followed by a blank line separating it
from a description of the feature/change in one or more paragraphs, each wrapped
at 80 characters. Remember that a news entry is meant for end users and should
only contain details relevant to an end user.
A trivial change is anything that does not warrant an entry in the news file.
Some examples are: Code refactors that don't change anything as far as the
public is concerned, typo fixes, white space modification, etc. To mark a PR
as trivial a contributor simply needs to add a randomly named, empty file to
the ``news/`` directory with the extension of ``.trivial``. If you are on a
POSIX-like operating system, one can be added by running
``touch news/$(uuidgen).trivial``. On Windows, the same result can be achieved
in PowerShell using ``New-Item "news/$([guid]::NewGuid()).trivial"``. Core
committers may also add a "trivial" label to the PR which will accomplish the
same thing.
Upgrading, removing, or adding a new vendored library gets a special mention
using a ``news/<library>.vendor`` file. This is in addition to any features,
bugfixes, or other kinds of news that pulling in this library may have. This
uses the library name as the key so that updating the same library twice doesn't
produce two news file entries.
Changes to processes, policies, or other notable non-code matters can be
documented using a ``news/<name>.process`` file. This is not
typically used, but can be used for things like changing version schemes,
updating deprecation policy, etc.
Release Cadence
===============
The pip project has a release cadence of releasing whatever is on ``master``
every 3 months. This gives users a predictable pattern for when releases
are going to happen and prevents improvements and fixes from being locked up
for long periods of time, while still preventing the user base from fracturing
across version numbers.
Our release months are January, April, July, October. The release date within
that month will be up to the release manager for that release. If there are
no changes, then that release month is skipped and the next release will be
3 months later.
The release manager may, at their discretion, choose whether or not there
will be a pre-release period for a release, and if there is may extend that
period into the next month if needed.
Because releases are made directly from the ``master`` branch, it is essential
that ``master`` is always in a releasable state. It is acceptable to merge
PRs that partially implement a new feature, but only if the partially
implemented version is usable in that state (for example, with reduced
functionality or disabled by default). In the case where a merged PR is found
to need extra work before being released, the release manager always has the
option to back out the partial change prior to a release. The PR can then be
reworked and resubmitted for the next release.
Deprecation Policy
==================
Any change to pip that removes or significantly alters user-visible behaviour
that is described in the pip documentation will be deprecated for a minimum of
one released version before the change occurs. Deprecation will take the form of
a warning being issued by pip when the feature is used. Longer deprecation
periods, or deprecation warnings for behaviour changes that would not normally
be covered by this policy, are also possible depending on circumstances, but
this is at the discretion of the pip developers.
Note that the documentation is the sole reference for what counts as agreed
behaviour. If something isn't explicitly mentioned in the documentation, it can
be changed without warning, or any deprecation period, in a pip release.
However, we are aware that the documentation isn't always complete - PRs that
document existing behaviour with the intention of covering that behaviour with
the above deprecation process are always acceptable, and will be considered on
their merits.
Release Process
===============
#. On the current pip ``master`` branch, generate a new ``AUTHORS.txt`` by
running ``invoke generate.authors`` and commit the results.
#. On the current pip ``master`` branch, make a new commit which bumps the
version in ``pip/__init__.py`` to the release version and adjust the
``CHANGES.txt`` file to reflect the current date. The release version should
follow a YY.N scheme, where YY is the two digit year, and N is the Nth
release within that year.
#. On the current pip ``master`` branch, generate a new ``NEWS.rst`` by running
``invoke generate.news`` and commit the results.
#. Create a signed tag of the ``master`` branch of the form ``X.Y.Z`` using the
command ``git tag -s X.Y.Z``.
#. Checkout the tag using ``git checkout X.Y.Z`` and create the distribution
files using ``python setup.py sdist bdist_wheel``.
#. Upload the distribution files to PyPI using twine
(``twine upload -s dist/*``). The upload should include GPG signatures of
the distribution files.
#. Push all of the changes.
#. Regenerate the ``get-pip.py`` script by running
``invoke generate.installer`` in the get-pip repository, and committing the
results.
Creating a Bugfix Release
=========================
Sometimes we need to release a bugfix release of the form ``X.Y.Z+1``. In order
to create one of these, the changes should already be merged into the
``master`` branch.
#. Create a new ``release/YY.N.Z+1`` branch off of the ``YY.N`` tag using the
command ``git checkout -b release/YY.N.Z+1 YY.N``.
#. Cherry pick the fixed commits off of the ``master`` branch, fixing any
conflicts and moving any changelog entries from the development version's
changelog section to the ``YY.N.Z+1`` section.
#. Push the ``release/YY.N.Z+1`` branch to GitHub and submit a PR for it against
the ``master`` branch and wait for the tests to run.
#. Once the tests pass, merge the ``release/YY.N.Z+1`` branch into master, and follow
the above release process starting with step 4.
.. _`mailing list`: https://mail.python.org/mailman/listinfo/distutils-sig
.. _`towncrier`: https://pypi.org/project/towncrier/
.. _`Travis CI`: https://travis-ci.org/
.. _`Appveyor CI`: https://www.appveyor.com/
.. _`.travis.yml`: https://github.com/pypa/pip/blob/master/.travis.yml
.. _`appveyor.yml`: https://github.com/pypa/pip/blob/master/appveyor.yml
.. _`Travis CI Pull Requests`: https://travis-ci.org/pypa/pip/pull_requests
.. _`tools/test-requirements.txt`: https://github.com/pypa/pip/blob/master/tools/test-requirements.txt

View File

@ -0,0 +1,249 @@
============
Contributing
============
.. todo
Create a "guide" to pip's internals and link to it from here saying
"you might want to take a look at the guide"
Submitting Pull Requests
========================
Submit pull requests against the ``master`` branch, providing a good
description of what you're doing and why. You must have legal permission to
distribute any code you contribute to pip and it must be available under the
MIT License.
Provide tests that cover your changes and run the tests locally first. pip
:ref:`supports <compatibility-requirements>` multiple Python versions and
operating systems. Any pull request must consider and work on all these
platforms.
Pull Requests should be small to facilitate easier review. Keep them
self-contained, and limited in scope. `Studies have shown`_ that review quality
falls off as patch size grows. Sometimes this will result in many small PRs to
land a single large feature. In particular, pull requests must not be treated
as "feature branches", with ongoing development work happening within the PR.
Instead, the feature should be broken up into smaller, independent parts which
can be reviewed and merged individually.
Additionally, avoid including "cosmetic" changes to code that
is unrelated to your change, as these make reviewing the PR more difficult.
Examples include re-flowing text in comments or documentation, or addition or
removal of blank lines or whitespace within lines. Such changes can be made
separately, as a "formatting cleanup" PR, if needed.
Automated Testing
=================
All pull requests and merges to the 'master' branch are tested using `Travis CI`_
and `Appveyor CI`_ based on our `.travis.yml`_ and `appveyor.yml`_ files.
You can find the status and results of the CI runs for your PR on GitHub's Web
UI for the pull request. You can also find links to the CI services' pages for
the specific builds in the form of "Details" links, in case the CI run fails
and you wish to view the output.
To trigger CI to run again for a pull request, you can close and open the pull
request or submit another change to the pull request. If needed, project
maintainers can manually trigger a restart of a job/build.
NEWS Entries
============
The ``NEWS.rst`` file is managed using `towncrier`_ and all non-trivial changes
must be accompanied by a news entry.
To add an entry to the news file, first you need to have created an issue
describing the change you want to make. A Pull Request itself *may* function as
such, but it is preferred to have a dedicated issue (for example, in case the
PR ends up rejected due to code quality reasons).
Once you have an issue or pull request, take its number and create a
file inside of the ``news/`` directory named after that issue number with an
extension of ``removal``, ``feature``, ``bugfix``, or ``doc``. Thus if your
issue or PR number is ``1234`` and this change is fixing a bug, then you would
create a file ``news/1234.bugfix``. PRs can span multiple categories by creating
multiple files (for instance, if you added a feature and deprecated/removed the
old feature at the same time, you would create ``news/NNNN.feature`` and
``news/NNNN.removal``). Likewise if a PR touches multiple issues/PRs you may
create a file for each of them with the exact same contents and Towncrier will
deduplicate them.
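For example, the fix for issue ``5558`` in this release's changelog could have
been announced with a one-line fragment like this (a sketch; the exact wording
is up to the author):

.. code-block:: console

    $ echo "Fix a crash that occurs when PATH is not set." > news/5558.bugfix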
Contents of a NEWS entry
------------------------
The contents of this file are reStructuredText formatted text that will be used
as the content of the news file entry. You do not need to reference the issue
or PR numbers here as towncrier will automatically add a reference to all of
the affected issues when rendering the news file.
In order to maintain a consistent style in the ``NEWS.rst`` file, it is
preferred to keep the news entry to the point, in sentence case, shorter than
80 characters and in an imperative tone -- an entry should complete the sentence
"This change will ...". In rare cases, where one line is not enough, use a
summary line in an imperative tone followed by a blank line separating it
from a description of the feature/change in one or more paragraphs, each wrapped
at 80 characters. Remember that a news entry is meant for end users and should
only contain details relevant to an end user.
Choosing the type of NEWS entry
-------------------------------
A trivial change is anything that does not warrant an entry in the news file.
Some examples are: Code refactors that don't change anything as far as the
public is concerned, typo fixes, white space modification, etc. To mark a PR
as trivial a contributor simply needs to add a randomly named, empty file to
the ``news/`` directory with the extension of ``.trivial``. If you are on a
POSIX-like operating system, one can be added by running
``touch news/$(uuidgen).trivial``. On Windows, the same result can be achieved
in PowerShell using ``New-Item "news/$([guid]::NewGuid()).trivial"``. Core
committers may also add a "trivial" label to the PR which will accomplish the
same thing.
Upgrading, removing, or adding a new vendored library gets a special mention
using a ``news/<library>.vendor`` file. This is in addition to any features,
bugfixes, or other kinds of news that pulling in this library may have. This
uses the library name as the key so that updating the same library twice doesn't
produce two news file entries.
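For example, the urllib3 upgrade in this release's changelog maps to a fragment
along these lines (a sketch):

.. code-block:: console

    $ echo "Update urllib3 to 1.23." > news/urllib3.vendor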
Changes to processes, policies, or other notable non-code matters can be
documented using a ``news/<name>.process`` file. This is not
typically used, but can be used for things like changing version schemes,
updating deprecation policy, etc.
Updating your branch
====================
As you work, you might need to keep your local ``master`` branch up to date with
the ``master`` branch in the main pip repository, which moves forward as the
maintainers merge pull requests. Most people working on the project use the
following workflow.
This assumes that you have Git configured so that when you run the following
command:
.. code-block:: console
git remote -v
Your output looks like this:
.. code-block:: console
origin https://github.com/USERNAME/pip.git (fetch)
origin https://github.com/USERNAME/pip.git (push)
upstream https://github.com/pypa/pip.git (fetch)
upstream https://github.com/pypa/pip.git (push)
In the example above, ``USERNAME`` is your username on GitHub.
First, fetch the latest changes from the main pip repository, ``upstream``:
.. code-block:: console
git fetch upstream
Then, check out your local ``master`` branch, and rebase the changes on top of
it:
.. code-block:: console
git checkout master
git rebase upstream/master
At this point, you might have to `resolve merge conflicts`_. Once this is done,
push the updates you have just made to your local ``master`` branch to your
``origin`` repository on GitHub:
.. code-block:: console
git checkout master
git push origin master
Now your local ``master`` branch and the ``master`` branch in your ``origin``
repo have been updated with the most recent changes from the main pip
repository.
To keep your branches updated, the process is similar:
.. code-block:: console
git checkout awesome-feature
git fetch upstream
git rebase upstream/master
Now your branch has been updated with the latest changes from the
``master`` branch on the upstream pip repository.
It's good practice to back up your branches by pushing them to your
``origin`` on GitHub as you are working on them. To push a branch,
run this command:
.. code-block:: console
git push origin awesome-feature
In this example, ``awesome-feature`` is the name of your branch. This
will push the branch you are working on to GitHub, but will not
create a PR.
Once you have pushed your branch to your ``origin``, if you need to
update it again, you will have to force push your changes by running the
following command:
.. code-block:: console
git push -f origin awesome-feature
The ``-f`` (or ``--force``) flag after ``push`` forces updates from your local
branch to update your ``origin`` branch. If you have a PR open on your
branch, force pushing will update your PR. (This is a useful command
when someone requests changes on a PR.)
If you get an error message like this:
.. code-block:: console
! [rejected] awesome-feature -> awesome-feature (non-fast-forward)
error: failed to push some refs to 'https://github.com/USERNAME/pip.git'
hint: Updates were rejected because the tip of your current branch is behind
hint: its remote counterpart. Integrate the remote changes (e.g.
hint: 'git pull ...') before pushing again.
hint: See the 'Note about fast-forwards' in 'git push --help' for details.
Try force-pushing your branch with ``push -f``.
The ``master`` branch in the main pip repository gets updated frequently, so
you might have to update your branch at least once while you are working on it.
Becoming a maintainer
=====================
If you want to become an official maintainer, start by helping out.
Later, when you think you're ready, get in touch with one of the maintainers
and they will initiate a vote.
.. note::
Upon becoming a maintainer, a person should be given access to various
pip-related tooling across multiple platforms. These are noted here for
future reference by the maintainers:
- GitHub Push Access
- PyPI Publishing Access
- CI Administration capabilities
- ReadTheDocs Administration capabilities
.. _`Studies have shown`: https://smartbear.com/smartbear/media/pdfs/wp-cc-11-best-practices-of-peer-code-review.pdf
.. _`resolve merge conflicts`: https://help.github.com/articles/resolving-a-merge-conflict-using-the-command-line/
.. _`Travis CI`: https://travis-ci.org/
.. _`Appveyor CI`: https://www.appveyor.com/
.. _`.travis.yml`: https://github.com/pypa/pip/blob/master/.travis.yml
.. _`appveyor.yml`: https://github.com/pypa/pip/blob/master/appveyor.yml
.. _`towncrier`: https://pypi.org/project/towncrier/

View File

@ -0,0 +1,110 @@
===============
Getting Started
===============
We're pleased that you are interested in working on pip.
This document is meant to get you set up to work on pip and to act as a guide
and reference for the development setup. If you face any issues during this
process, please `open an issue`_ about it on the issue tracker.
Development tools
=================
pip uses :pypi:`tox` for testing against multiple different Python environments
and ensuring reproducible environments for linting and building documentation.
For developing pip, you need to install ``tox`` on your system. Often, you can
just do ``python -m pip install tox`` to install and use it.
Running Tests
-------------
pip uses the :pypi:`pytest` test framework, :pypi:`mock` and :pypi:`pretend`
for testing. These are automatically installed by tox for running the tests.
To run tests locally, run:
.. code-block:: console
$ tox -e py36
The example above runs tests against Python 3.6. You can also use other
versions like ``py27`` and ``pypy3``.
``tox`` has been configured to pass any additional arguments it is given to
``pytest``. This enables the use of pytest's `rich CLI`_. As an example, you
can select tests using the various ways that pytest provides:
.. code-block:: console
$ # Using file name
$ tox -e py36 -- tests/functional/test_install.py
$ # Using markers
$ tox -e py36 -- -m unit
$ # Using keywords
$ tox -e py36 -- -k "install and not wheel"
Running pip's test suite requires supported version control tools (subversion,
bazaar, git, and mercurial) to be installed. If you are missing one of the VCS
tools, you can tell pip to skip those tests:
.. code-block:: console
$ tox -e py36 -- -k "not svn"
$ tox -e py36 -- -k "not (svn or git)"
Running Linters
---------------
pip uses :pypi:`flake8` and :pypi:`isort` for linting the codebase. These
ensure that the codebase is in compliance with :pep:`8` and the imports are
consistently ordered and styled.
To use linters locally, run:
.. code-block:: console
$ tox -e lint-py2
$ tox -e lint-py3
The above commands run the linters on Python 2 followed by Python 3.
.. note::
Do not silence errors from flake8 with ``# noqa`` comments or otherwise.
The only exception to this is silencing unused-import errors for imports
related to static type checking as currently `flake8 does not understand
PEP 484 type-comments`_.
Running mypy
------------
pip uses :pypi:`mypy` to run static type analysis, which helps catch certain
kinds of bugs. The codebase uses `PEP 484 type-comments`_ due to compatibility
requirements with Python 2.7.
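A minimal sketch of this comment style (a hypothetical function, not taken from
pip's codebase):

.. code-block:: python

    def greeting(name, excited=False):
        # type: (str, bool) -> str
        # mypy checks the PEP 484 type comment above; no annotation
        # syntax is used, so the code remains valid Python 2.7.
        suffix = "!" if excited else "."
        return "Hello, " + name + suffix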
To run the ``mypy`` type checker, run:
.. code-block:: console
$ tox -e mypy
Building Documentation
----------------------
pip's documentation is built using :pypi:`Sphinx`. The documentation is written
in reStructuredText.
To build it locally, run:
.. code-block:: console
$ tox -e docs
The built documentation can be found in the ``docs/build`` folder.
.. _`open an issue`: https://github.com/pypa/pip/issues/new?title=Trouble+with+pip+development+environment
.. _`flake8 does not understand PEP 484 type-comments`: https://gitlab.com/pycqa/flake8/issues/118
.. _`PEP 484 type-comments`: https://www.python.org/dev/peps/pep-0484/#suggested-syntax-for-python-2-7-and-straddling-code
.. _`rich CLI`: https://docs.pytest.org/en/latest/usage.html#specifying-tests-selecting-tests

View File

@ -0,0 +1,25 @@
Development
===========
pip is a volunteer-maintained open source project and we welcome contributions
of all forms. The sections below will help you get started with development,
testing, and documentation.
You can also join ``#pypa`` (general packaging discussion and user support) and
``#pypa-dev`` (discussion about development of packaging tools) `on Freenode`_,
or the `pypa-dev mailing list`_, to ask questions or get involved.
.. toctree::
:maxdepth: 2
getting-started
contributing
release-process
.. note::
pip's development documentation has been rearranged and some older
references might be broken.
.. _`on Freenode`: https://webchat.freenode.net/?channels=%23pypa-dev,pypa
.. _`pypa-dev mailing list`: https://groups.google.com/forum/#!forum/pypa-dev

View File

@ -0,0 +1,103 @@
===============
Release process
===============
Release Cadence
===============
The pip project has a release cadence of releasing whatever is on ``master``
every 3 months. This gives users a predictable pattern for when releases
are going to happen and prevents improvements and fixes from being locked up
for long periods of time, while still preventing the user base from fracturing
across version numbers.
Our release months are January, April, July, October. The release date within
that month will be up to the release manager for that release. If there are
no changes, then that release month is skipped and the next release will be
3 months later.
The release manager may, at their discretion, choose whether or not there
will be a pre-release period for a release, and if there is may extend that
period into the next month if needed.
Because releases are made directly from the ``master`` branch, it is essential
that ``master`` is always in a releasable state. It is acceptable to merge
PRs that partially implement a new feature, but only if the partially
implemented version is usable in that state (for example, with reduced
functionality or disabled by default). In the case where a merged PR is found
to need extra work before being released, the release manager always has the
option to back out the partial change prior to a release. The PR can then be
reworked and resubmitted for the next release.
Deprecation Policy
==================
Any change to pip that removes or significantly alters user-visible behavior
that is described in the pip documentation will be deprecated for a minimum of
6 months before the change occurs. Deprecation will take the form of a warning
being issued by pip when the feature is used. Longer deprecation periods, or
deprecation warnings for behavior changes that would not normally be covered by
this policy, are also possible depending on circumstances, but this is at the
discretion of the pip developers.
Note that the documentation is the sole reference for what counts as agreed
behavior. If something isn't explicitly mentioned in the documentation, it can
be changed without warning, or any deprecation period, in a pip release.
However, we are aware that the documentation isn't always complete - PRs that
document existing behavior with the intention of covering that behavior with
the above deprecation process are always acceptable, and will be considered on
their merits.
.. note::
pip has a helper function for making deprecation easier for pip maintainers.
The supporting documentation can be found in the source code of
``pip._internal.utils.deprecation.deprecated``. The function is not a part of
pip's public API.
Release Process
===============
Creating a new release
----------------------
#. On the current pip ``master`` branch, generate a new ``AUTHORS.txt`` by
running ``invoke generate.authors`` and commit the results.
#. On the current pip ``master`` branch, make a new commit which bumps the
version in ``pip/__init__.py`` to the release version and adjust the
``CHANGES.txt`` file to reflect the current date. The release version should
follow a YY.N scheme, where YY is the two digit year, and N is the Nth
release within that year.
#. On the current pip ``master`` branch, generate a new ``NEWS.rst`` by running
``invoke generate.news`` and commit the results.
#. Create a signed tag of the ``master`` branch of the form ``YY.N`` using the
command ``git tag -s YY.N``.
#. Checkout the tag using ``git checkout YY.N`` and create the distribution
files using ``python setup.py sdist bdist_wheel``.
#. Upload the distribution files to PyPI using twine
(``twine upload -s dist/*``). The upload should include GPG signatures of
the distribution files.
#. Push all of the changes.
#. Regenerate the ``get-pip.py`` script by running
``invoke generate.installer`` in the get-pip repository, and committing the
results.
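For reference, steps 4 through 6 condense to the following commands, using a
hypothetical ``18.1`` release as an example:

.. code-block:: console

    $ git tag -s 18.1
    $ git checkout 18.1
    $ python setup.py sdist bdist_wheel
    $ twine upload -s dist/*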
Creating a bug-fix release
--------------------------
Sometimes we need to release a bugfix release of the form ``YY.N.Z+1``. In
order to create one of these, the changes should already be merged into the
``master`` branch.
#. Create a new ``release/YY.N.Z+1`` branch off of the ``YY.N`` tag using the
command ``git checkout -b release/YY.N.Z+1 YY.N``.
#. Cherry pick the fixed commits off of the ``master`` branch, fixing any
conflicts and moving any changelog entries from the development version's
changelog section to the ``YY.N.Z+1`` section.
#. Push the ``release/YY.N.Z+1`` branch to GitHub and submit a PR for it against
the ``master`` branch and wait for the tests to run.
#. Once the tests pass, merge the ``release/YY.N.Z+1`` branch into master, and follow
the above release process starting with step 4.
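Sketched out for a hypothetical ``18.0.1`` bug-fix release (the commit hash is a
placeholder):

.. code-block:: console

    $ git checkout -b release/18.0.1 18.0
    $ git cherry-pick <sha-of-fix>
    $ git push origin release/18.0.1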

View File

@ -1,2 +0,0 @@
[restructuredtext parser]
smart_quotes = no

View File

@ -18,5 +18,5 @@ for installing Python packages.
installing
user_guide
reference/index
development
development/index
news

View File

@ -6,10 +6,10 @@ Installation
Do I need to install pip?
-------------------------
pip is already installed if you are using Python 2 >=2.7.9 or Python 3 >=3.4
downloaded from `python.org <https://www.python.org>`_ or if you are working
in a :ref:`Virtual Environment <pypug:Creating and using Virtual Environments>`
created by :ref:`pypug:virtualenv` or :ref:`pyvenv <pypug:venv>`.
Just make sure to :ref:`upgrade pip <Upgrading pip>`.
@ -23,7 +23,9 @@ To install pip, securely download `get-pip.py
curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py
Inspect ``get-pip.py`` for any malevolence. Then run the following::
As when running any script downloaded from the web, ensure that you have
reviewed the code and are happy that it works as you expect.
Then run the following::
python get-pip.py
@ -34,7 +36,7 @@ Inspect ``get-pip.py`` for any malevolence. Then run the following::
system or another package manager. ``get-pip.py`` does not coordinate with
those tools, and may leave your system in an inconsistent state.
``get-pip.py`` also installs :ref:`pypug:setuptools` [2]_ and :ref:`pypug:wheel`
if they are not already. :ref:`pypug:setuptools` is required to install
:term:`source distributions <pypug:Source Distribution (or "sdist")>`. Both are
required in order to build a :ref:`Wheel cache` (which improves installation
@ -104,6 +106,8 @@ On Windows [4]_::
python -m pip install -U pip
.. _compatibility-requirements:
Python and OS Compatibility
---------------------------

View File

@ -1,253 +0,0 @@
@ECHO OFF
REM Command file for Sphinx documentation
if "%SPHINXBUILD%" == "" (
set SPHINXBUILD=sphinx-build
)
set BUILDDIR=build
set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
set I18NSPHINXOPTS=%SPHINXOPTS% .
if NOT "%PAPER%" == "" (
set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
)
if "%1" == "" goto help
if "%1" == "help" (
:help
echo.Please use `make ^<target^>` where ^<target^> is one of
echo. html to make standalone HTML files
echo. dirhtml to make HTML files named index.html in directories
echo. singlehtml to make a single large HTML file
echo. pickle to make pickle files
echo. json to make JSON files
echo. htmlhelp to make HTML files and a HTML help project
echo. qthelp to make HTML files and a qthelp project
echo. devhelp to make HTML files and a Devhelp project
echo. epub to make an epub
echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
echo. text to make text files
echo. man to make manual pages
echo. texinfo to make Texinfo files
echo. gettext to make PO message catalogs
echo. changes to make an overview over all changed/added/deprecated items
echo. xml to make Docutils-native XML files
echo. pseudoxml to make pseudoxml-XML files for display purposes
echo. linkcheck to check all external links for integrity
echo. doctest to run all doctests embedded in the documentation if enabled
goto end
)
if "%1" == "clean" (
for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
del /q /s %BUILDDIR%\*
goto end
)
REM Check if sphinx-build is available and fallback to Python version if any
%SPHINXBUILD% 2> nul
if errorlevel 9009 goto sphinx_python
goto sphinx_ok
:sphinx_python
set SPHINXBUILD=python -m sphinx.__init__
%SPHINXBUILD% 2> nul
if errorlevel 9009 (
echo.
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
echo.installed, then set the SPHINXBUILD environment variable to point
echo.to the full path of the 'sphinx-build' executable. Alternatively you
echo.may add the Sphinx directory to PATH.
echo.
echo.If you don't have Sphinx installed, grab it from
echo.http://sphinx-doc.org/
exit /b 1
)
:sphinx_ok
if "%1" == "html" (
%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The HTML pages are in %BUILDDIR%/html.
goto end
)
if "%1" == "dirhtml" (
%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
goto end
)
if "%1" == "singlehtml" (
%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
goto end
)
if "%1" == "pickle" (
%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
if errorlevel 1 exit /b 1
echo.
echo.Build finished; now you can process the pickle files.
goto end
)
if "%1" == "json" (
%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
if errorlevel 1 exit /b 1
echo.
echo.Build finished; now you can process the JSON files.
goto end
)
if "%1" == "htmlhelp" (
%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
if errorlevel 1 exit /b 1
echo.
echo.Build finished; now you can run HTML Help Workshop with the ^
.hhp project file in %BUILDDIR%/htmlhelp.
goto end
)
if "%1" == "qthelp" (
%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
if errorlevel 1 exit /b 1
echo.
echo.Build finished; now you can run "qcollectiongenerator" with the ^
.qhcp project file in %BUILDDIR%/qthelp, like this:
echo.^> qcollectiongenerator %BUILDDIR%\qthelp\pip-installer.qhcp
echo.To view the help file:
echo.^> assistant -collectionFile %BUILDDIR%\qthelp\pip-installer.ghc
goto end
)
if "%1" == "devhelp" (
%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
if errorlevel 1 exit /b 1
echo.
echo.Build finished.
goto end
)
if "%1" == "epub" (
%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The epub file is in %BUILDDIR%/epub.
goto end
)
if "%1" == "latex" (
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
if errorlevel 1 exit /b 1
echo.
echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
goto end
)
if "%1" == "latexpdf" (
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
cd %BUILDDIR%/latex
make all-pdf
cd %BUILDDIR%/..
echo.
echo.Build finished; the PDF files are in %BUILDDIR%/latex.
goto end
)
if "%1" == "latexpdfja" (
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
cd %BUILDDIR%/latex
make all-pdf-ja
cd %BUILDDIR%/..
echo.
echo.Build finished; the PDF files are in %BUILDDIR%/latex.
goto end
)
if "%1" == "text" (
%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The text files are in %BUILDDIR%/text.
goto end
)
if "%1" == "man" (
%SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The manual pages are in %BUILDDIR%/man.
goto end
)
if "%1" == "texinfo" (
%SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
goto end
)
if "%1" == "gettext" (
%SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
goto end
)
if "%1" == "changes" (
%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
if errorlevel 1 exit /b 1
echo.
echo.The overview file is in %BUILDDIR%/changes.
goto end
)
if "%1" == "linkcheck" (
%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
if errorlevel 1 exit /b 1
echo.
echo.Link check complete; look for any errors in the above output ^
or in %BUILDDIR%/linkcheck/output.txt.
goto end
)
if "%1" == "doctest" (
%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
if errorlevel 1 exit /b 1
echo.
echo.Testing of doctests in the sources finished, look at the ^
results in %BUILDDIR%/doctest/output.txt.
goto end
)
if "%1" == "xml" (
%SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The XML files are in %BUILDDIR%/xml.
goto end
)
if "%1" == "pseudoxml" (
%SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
goto end
)
:end

View File

@ -154,8 +154,9 @@ appropriately.
installation of build dependencies from source has been disabled until a safe
resolution of this issue is found.
* ``pip<18.0`` does not support the use of environment markers and extras, only
version specifiers are respected.
* ``pip<18.0``: only supports installing build requirements from wheels, and
does not support the use of environment markers and extras (only version
specifiers are respected).
Future Developments

View File

@ -49,6 +49,21 @@ For more information and examples, see the :ref:`pip install` reference.
.. _PyPI: https://pypi.org/
Using a Proxy Server
********************
When installing packages from `PyPI`_, pip requires internet access, which
in many corporate environments requires an outbound HTTP proxy server.
pip can be configured to connect through a proxy server in various ways:
* using the ``--proxy`` command-line option to specify a proxy in the form
``[user:passwd@]proxy.server:port``
* using ``proxy`` in a :ref:`config-file`
* by setting the standard environment variables ``http_proxy``, ``https_proxy``
and ``no_proxy`` (a minimal sketch of the last approach follows below).
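For example, a wrapper script can set these environment variables just for a
pip subprocess (an illustrative sketch only; the proxy URL is a placeholder):

    import os
    import subprocess
    import sys

    # Placeholder proxy URL; substitute your organization's proxy.
    proxy = "http://user:passwd@proxy.server:3128"
    env = dict(os.environ, http_proxy=proxy, https_proxy=proxy)

    # Run pip in a subprocess so the proxy applies only to this invocation.
    subprocess.check_call(
        [sys.executable, "-m", "pip", "install", "requests"], env=env
    )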
.. _`Requirements Files`:
Requirements Files
@ -336,13 +351,13 @@ variable ``PIP_CONFIG_FILE``.
If multiple configuration files are found by pip then they are combined in
the following order:
1. Firstly the site-wide file is read, then
2. The per-user file is read, and finally
3. The virtualenv-specific file is read.
1. The site-wide file is read
2. The per-user file is read
3. The virtualenv-specific file is read
Each file read overrides any values read from previous files, so if the
global timeout is specified in both the site-wide file and the per-user file
then the latter value is the one that will be used.
then the latter value will be used.
The names of the settings are derived from the long command line option, e.g.
if you want to use a different package index (``--index-url``) and set the

View File

@ -1 +0,0 @@
Introduce a new ``--prefer-binary`` flag to prefer older wheels over newer source packages.

View File

@ -1 +0,0 @@
Dropped support for Python 3.3.

View File

@ -1 +0,0 @@
Adjust the path to selfcheck.json: remove the virtualenv-specific path and honor cache-dir in pip.conf.

5
news/4187.feature Normal file
View File

@ -0,0 +1,5 @@
Allow PEP 508 URL requirements to be used as dependencies.
As a security measure, pip will raise an exception when installing packages from
PyPI if those packages depend on packages not also hosted on PyPI.
In the future, PyPI will block uploading packages with such external URL dependencies directly.

View File

@ -1 +0,0 @@
Remove compiled pyo files for wheel packages.

View File

@ -1,2 +0,0 @@
Improve autocompletion function on file name completion after options
which have ``<file>``, ``<dir>`` or ``<path>`` as metavar.

View File

@ -1,2 +0,0 @@
Improve autocompletion function on file name completion after options
which have ``<file>``, ``<dir>`` or ``<path>`` as metavar.

View File

@ -1 +0,0 @@
Speed up printing of newly installed package versions

View File

@ -1,4 +0,0 @@
Restrict install-time dependency warnings to directly-dependent packages.
Warning about the entire package set has resulted in users getting confused as
to why pip is printing these warnings.

View File

@ -1 +0,0 @@
Improve handling of PEP 518 build requirements: support environment markers and extras.

View File

@ -1 +0,0 @@
Remove username/password from log message when using index with basic auth

View File

@ -1 +0,0 @@
Clarify that the output of ``pip show`` is in RFC-compliant mail header format for people who want to parse the output.

View File

@ -1 +0,0 @@
Improve handling of PEP 518 build requirements: support environment markers and extras.

View File

@ -1 +0,0 @@
Remove trailing os.sep from PATH directories to avoid false negatives

View File

@ -1 +0,0 @@
Fix "pip wheel pip" being blocked by the "don't use pip to modify itself" check

View File

@ -1 +0,0 @@
Fix "pip wheel pip" being blocked by the "don't use pip to modify itself" check

View File

@ -1 +0,0 @@
Improve status message when upgrade is skipped due to only-if-needed strategy

View File

@ -1 +0,0 @@
Add test for PR 5293: Remove trailing os.sep to avoid false negatives

View File

@ -1,5 +0,0 @@
Disable pip's version check (and upgrade message) when installed by a different package manager.
This works better with Linux distributions where pip's upgrade message may
result in users running pip in a manner that modifies files that should be
managed by the OS's package manager.

View File

@ -1 +0,0 @@
Fix the revendoring script so that it does not rewrite unrelated imports that merely start with a suspicious prefix.

View File

@ -1 +0,0 @@
Check for file existence and unlink first when clobbering existing files during a wheel install.

View File

@ -1 +0,0 @@
Improve the error message to be more specific when no files are found as listed in PKG-INFO.

View File

@ -1 +0,0 @@
Start refusing to install packages whose ``pyproject.toml`` does not comply with PEP 518.

View File

@ -1,4 +0,0 @@
Restrict install-time dependency warnings to directly-dependent packages.
Warning about the entire package set has resulted in users getting confused as
to why pip is printing these warnings.

View File

@ -1 +0,0 @@
Always read ``pyproject.toml`` as UTF-8. This fixes Unicode handling on Windows and Python 2.

View File

@ -1 +0,0 @@
Start refusing to install packages whose ``pyproject.toml`` does not comply with PEP 518.

View File

@ -1,4 +0,0 @@
Remove the shim for the old get-pip.py location.
For the past 2 years, it's only been redirecting users to use the newer
https://bootstrap.pypa.io/get-pip.py location.

View File

@ -1 +0,0 @@
Switch to a calendar-based versioning scheme.

View File

@ -1 +0,0 @@
Formally document our deprecation process

View File

@ -1 +0,0 @@
Adopt and document NEWS fragment writing style

View File

@ -1 +0,0 @@
Switch to releasing a new, non-bug-fix version of pip every 3 months.

View File

@ -0,0 +1 @@
Fix "Requirements Files" reference in User Guide

View File

@ -1,7 +1,9 @@
[isort]
skip =
_vendor
__main__.py
.tox,
.scratch,
_vendor,
data
multi_line_output = 5
known_third_party =
pip._vendor
@ -12,7 +14,11 @@ default_section = THIRDPARTY
include_trailing_comma = true
[flake8]
exclude = .tox,.idea,.scratch,*.egg,build,_vendor,data
exclude =
.tox,
.scratch,
_vendor,
data
select = E,W,F
[mypy]

View File

@ -3,8 +3,7 @@ import os
import re
import sys
from setuptools import setup, find_packages
from setuptools import find_packages, setup
here = os.path.abspath(os.path.dirname(__file__))

View File

@ -1 +1 @@
__version__ = "18.0.dev0"
__version__ = "18.1.dev0"

View File

@ -13,7 +13,7 @@ if __package__ == '':
path = os.path.dirname(os.path.dirname(__file__))
sys.path.insert(0, path)
from pip._internal import main as _main # noqa
from pip._internal import main as _main # isort:skip # noqa
if __name__ == '__main__':
sys.exit(_main())

View File

@ -274,15 +274,6 @@ def parseopts(args):
return cmd_name, cmd_args
def check_isolated(args):
isolated = False
if "--isolated" in args:
isolated = True
return isolated
def main(args=None):
if args is None:
args = sys.argv[1:]
@ -306,5 +297,5 @@ def main(args=None):
except locale.Error as e:
# setlocale can apparently crash if locale are uninitialized
logger.debug("Ignoring error %s when setting locale", e)
command = commands_dict[cmd_name](isolated=check_isolated(cmd_args))
command = commands_dict[cmd_name](isolated=("--isolated" in cmd_args))
return command.main(cmd_args)

View File

@ -24,7 +24,7 @@ from pip._internal.status_codes import (
ERROR, PREVIOUS_BUILD_DIR_ERROR, SUCCESS, UNKNOWN_ERROR,
VIRTUALENV_NOT_FOUND,
)
from pip._internal.utils.logging import IndentingFormatter
from pip._internal.utils.logging import setup_logging
from pip._internal.utils.misc import get_prog, normalize_path
from pip._internal.utils.outdated import pip_version_check
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
@ -42,7 +42,6 @@ class Command(object):
usage = None # type: Optional[str]
hidden = False # type: bool
ignore_require_venv = False # type: bool
log_streams = ("ext://sys.stdout", "ext://sys.stderr")
def __init__(self, isolated=False):
parser_kw = {
@ -114,89 +113,15 @@ class Command(object):
# Set verbosity so that it can be used elsewhere.
self.verbosity = options.verbose - options.quiet
if self.verbosity >= 1:
level = "DEBUG"
elif self.verbosity == -1:
level = "WARNING"
elif self.verbosity == -2:
level = "ERROR"
elif self.verbosity <= -3:
level = "CRITICAL"
else:
level = "INFO"
setup_logging(
verbosity=self.verbosity,
no_color=options.no_color,
user_log_file=options.log,
)
# The root logger should match the "console" level *unless* we
# specified "--log" to send debug logs to a file.
root_level = level
if options.log:
root_level = "DEBUG"
logger_class = "pip._internal.utils.logging.ColorizedStreamHandler"
handler_class = "pip._internal.utils.logging.BetterRotatingFileHandler"
logging.config.dictConfig({
"version": 1,
"disable_existing_loggers": False,
"filters": {
"exclude_warnings": {
"()": "pip._internal.utils.logging.MaxLevelFilter",
"level": logging.WARNING,
},
},
"formatters": {
"indent": {
"()": IndentingFormatter,
"format": "%(message)s",
},
},
"handlers": {
"console": {
"level": level,
"class": logger_class,
"no_color": options.no_color,
"stream": self.log_streams[0],
"filters": ["exclude_warnings"],
"formatter": "indent",
},
"console_errors": {
"level": "WARNING",
"class": logger_class,
"no_color": options.no_color,
"stream": self.log_streams[1],
"formatter": "indent",
},
"user_log": {
"level": "DEBUG",
"class": handler_class,
"filename": options.log or "/dev/null",
"delay": True,
"formatter": "indent",
},
},
"root": {
"level": root_level,
"handlers": list(filter(None, [
"console",
"console_errors",
"user_log" if options.log else None,
])),
},
# Disable any logging besides WARNING unless we have DEBUG level
# logging enabled. These use both pip._vendor and the bare names
# for the case where someone unbundles our libraries.
"loggers": {
name: {
"level": (
"WARNING" if level in ["INFO", "ERROR"] else "DEBUG"
)
} for name in [
"pip._vendor", "distlib", "requests", "urllib3"
]
},
})
# TODO: try to get these passing down from the command?
# without resorting to os.environ to hold these.
# TODO: Try to get these passing down from the command?
# without resorting to os.environ to hold these.
# This also affects isolated builds and it should.
if options.no_input:
os.environ['PIP_NO_INPUT'] = '1'
@ -212,8 +137,6 @@ class Command(object):
)
sys.exit(VIRTUALENV_NOT_FOUND)
original_root_handlers = set(logging.root.handlers)
try:
status = self.run(options, args)
# FIXME: all commands should return an exit status
@ -240,7 +163,7 @@ class Command(object):
logger.debug('Exception information:', exc_info=True)
return ERROR
except:
except BaseException:
logger.critical('Exception:', exc_info=True)
return UNKNOWN_ERROR
@ -258,10 +181,9 @@ class Command(object):
)
with session:
pip_version_check(session, options)
# Avoid leaking loggers
for handler in set(logging.root.handlers) - original_root_handlers:
# this method benefit from the Logger class internal lock
logging.root.removeHandler(handler)
# Shutdown the logging module
logging.shutdown()
return SUCCESS

View File

@ -11,7 +11,6 @@ from pip._internal.utils.misc import call_subprocess
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.ui import open_spinner
logger = logging.getLogger(__name__)
@ -80,10 +79,13 @@ class BuildEnvironment(object):
args = [
sys.executable, '-m', 'pip', 'install', '--ignore-installed',
'--no-user', '--prefix', self.path, '--no-warn-script-location',
'--only-binary', ':all:',
]
if logger.getEffectiveLevel() <= logging.DEBUG:
args.append('-v')
for format_control in ('no_binary', 'only_binary'):
formats = getattr(finder.format_control, format_control)
args.extend(('--' + format_control.replace('_', '-'),
','.join(sorted(formats or {':none:'}))))
if finder.index_urls:
args.extend(['-i', finder.index_urls[0]])
for extra_index in finder.index_urls[1:]:

View File

@ -11,6 +11,7 @@ from pip._vendor.packaging.utils import canonicalize_name
from pip._internal import index
from pip._internal.compat import expanduser
from pip._internal.download import path_to_url
from pip._internal.models.link import Link
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.wheel import InvalidWheelFilename, Wheel
@ -101,7 +102,7 @@ class Cache(object):
root = self.get_path_for_link(link)
path = os.path.join(root, candidate)
return index.Link(path_to_url(path))
return Link(path_to_url(path))
def cleanup(self):
pass

View File

@ -9,6 +9,7 @@ from pip._internal.exceptions import CommandError
from pip._internal.index import FormatControl
from pip._internal.operations.prepare import RequirementPreparer
from pip._internal.req import RequirementSet
from pip._internal.req.req_tracker import RequirementTracker
from pip._internal.resolve import Resolver
from pip._internal.utils.filesystem import check_path_owner
from pip._internal.utils.misc import ensure_dir, normalize_path
@ -180,7 +181,7 @@ class DownloadCommand(RequirementCommand):
)
options.cache_dir = None
with TempDirectory(
with RequirementTracker() as req_tracker, TempDirectory(
options.build_dir, delete=build_delete, kind="download"
) as directory:
@ -204,6 +205,7 @@ class DownloadCommand(RequirementCommand):
wheel_download_dir=None,
progress_bar=options.progress_bar,
build_isolation=options.build_isolation,
req_tracker=req_tracker,
)
resolver = Resolver(

View File

@ -19,6 +19,7 @@ from pip._internal.locations import distutils_scheme, virtualenv_no_global
from pip._internal.operations.check import check_install_conflicts
from pip._internal.operations.prepare import RequirementPreparer
from pip._internal.req import RequirementSet, install_given_reqs
from pip._internal.req.req_tracker import RequirementTracker
from pip._internal.resolve import Resolver
from pip._internal.status_codes import ERROR
from pip._internal.utils.filesystem import check_path_owner
@ -260,7 +261,7 @@ class InstallCommand(RequirementCommand):
)
options.cache_dir = None
with TempDirectory(
with RequirementTracker() as req_tracker, TempDirectory(
options.build_dir, delete=build_delete, kind="install"
) as directory:
requirement_set = RequirementSet(
@ -279,6 +280,7 @@ class InstallCommand(RequirementCommand):
wheel_download_dir=None,
progress_bar=options.progress_bar,
build_isolation=options.build_isolation,
req_tracker=req_tracker,
)
resolver = Resolver(

View File

@ -2,7 +2,6 @@ from __future__ import absolute_import
import json
import logging
import warnings
from pip._vendor import six
from pip._vendor.six.moves import zip_longest
@ -11,7 +10,6 @@ from pip._internal.basecommand import Command
from pip._internal.cmdoptions import index_group, make_option_group
from pip._internal.exceptions import CommandError
from pip._internal.index import PackageFinder
from pip._internal.utils.deprecation import RemovedInPip11Warning
from pip._internal.utils.misc import (
dist_is_editable, get_installed_distributions,
)
@ -78,9 +76,9 @@ class ListCommand(Command):
action='store',
dest='list_format',
default="columns",
choices=('legacy', 'columns', 'freeze', 'json'),
choices=('columns', 'freeze', 'json'),
help="Select the output format among: columns (default), freeze, "
"json, or legacy.",
"or json",
)
cmd_opts.add_option(
@ -123,13 +121,6 @@ class ListCommand(Command):
)
def run(self, options, args):
if options.list_format == "legacy":
warnings.warn(
"The legacy format has been deprecated and will be removed "
"in the future.",
RemovedInPip11Warning,
)
if options.outdated and options.uptodate:
raise CommandError(
"Options --outdated and --uptodate cannot be combined.")
@ -208,30 +199,6 @@ class ListCommand(Command):
dist.latest_filetype = typ
yield dist
def output_legacy(self, dist, options):
if options.verbose >= 1:
return '%s (%s, %s, %s)' % (
dist.project_name,
dist.version,
dist.location,
get_installer(dist),
)
elif dist_is_editable(dist):
return '%s (%s, %s)' % (
dist.project_name,
dist.version,
dist.location,
)
else:
return '%s (%s)' % (dist.project_name, dist.version)
def output_legacy_latest(self, dist, options):
return '%s - Latest: %s [%s]' % (
self.output_legacy(dist, options),
dist.latest_version,
dist.latest_filetype,
)
def output_package_listing(self, packages, options):
packages = sorted(
packages,
@ -249,12 +216,6 @@ class ListCommand(Command):
logger.info("%s==%s", dist.project_name, dist.version)
elif options.list_format == 'json':
logger.info(format_for_json(packages, options))
elif options.list_format == "legacy":
for dist in packages:
if options.outdated:
logger.info(self.output_legacy_latest(dist, options))
else:
logger.info(self.output_legacy(dist, options))
def output_package_listing_columns(self, data, header):
# insert the header first: we need to know the size of column names

View File

@ -10,6 +10,7 @@ from pip._internal.cache import WheelCache
from pip._internal.exceptions import CommandError, PreviousBuildDirError
from pip._internal.operations.prepare import RequirementPreparer
from pip._internal.req import RequirementSet
from pip._internal.req.req_tracker import RequirementTracker
from pip._internal.resolve import Resolver
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.wheel import WheelBuilder
@ -120,9 +121,10 @@ class WheelCommand(RequirementCommand):
build_delete = (not (options.no_clean or options.build_dir))
wheel_cache = WheelCache(options.cache_dir, options.format_control)
with TempDirectory(
with RequirementTracker() as req_tracker, TempDirectory(
options.build_dir, delete=build_delete, kind="wheel"
) as directory:
requirement_set = RequirementSet(
require_hashes=options.require_hashes,
)
@ -140,6 +142,7 @@ class WheelCommand(RequirementCommand):
wheel_download_dir=options.wheel_dir,
progress_bar=options.progress_bar,
build_isolation=options.build_isolation,
req_tracker=req_tracker,
)
resolver = Resolver(

View File

@ -217,7 +217,7 @@ else:
'hh',
fcntl.ioctl(fd, termios.TIOCGWINSZ, '12345678')
)
except:
except Exception:
return None
if cr == (0, 0):
return None
@ -228,7 +228,7 @@ else:
fd = os.open(os.ctermid(), os.O_RDONLY)
cr = ioctl_GWINSZ(fd)
os.close(fd)
except:
except Exception:
pass
if not cr:
cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))

View File

@ -9,7 +9,6 @@ import os
import posixpath
import re
import sys
import warnings
from collections import namedtuple
from pip._vendor import html5lib, requests, six
@ -27,13 +26,15 @@ from pip._internal.exceptions import (
BestVersionAlreadyInstalled, DistributionNotFound, InvalidWheelFilename,
UnsupportedWheel,
)
from pip._internal.models.candidate import InstallationCandidate
from pip._internal.models.index import PyPI
from pip._internal.models.link import Link
from pip._internal.pep425tags import get_supported
from pip._internal.utils.deprecation import RemovedInPip11Warning
from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import (
ARCHIVE_EXTENSIONS, SUPPORTED_EXTENSIONS, cached_property, normalize_path,
remove_auth_from_url, splitext,
remove_auth_from_url,
)
from pip._internal.utils.packaging import check_requires_python
from pip._internal.wheel import Wheel, wheel_ext
@ -57,47 +58,6 @@ SECURE_ORIGINS = [
logger = logging.getLogger(__name__)
class InstallationCandidate(object):
def __init__(self, project, version, location):
self.project = project
self.version = parse_version(version)
self.location = location
self._key = (self.project, self.version, self.location)
def __repr__(self):
return "<InstallationCandidate({!r}, {!r}, {!r})>".format(
self.project, self.version, self.location,
)
def __hash__(self):
return hash(self._key)
def __lt__(self, other):
return self._compare(other, lambda s, o: s < o)
def __le__(self, other):
return self._compare(other, lambda s, o: s <= o)
def __eq__(self, other):
return self._compare(other, lambda s, o: s == o)
def __ge__(self, other):
return self._compare(other, lambda s, o: s >= o)
def __gt__(self, other):
return self._compare(other, lambda s, o: s > o)
def __ne__(self, other):
return self._compare(other, lambda s, o: s != o)
def _compare(self, other, method):
if not isinstance(other, InstallationCandidate):
return NotImplemented
return method(self._key, other._key)
class PackageFinder(object):
"""This finds packages.
@ -212,10 +172,12 @@ class PackageFinder(object):
# # dependency_links value
# # FIXME: also, we should track comes_from (i.e., use Link)
if self.process_dependency_links:
warnings.warn(
deprecated(
"Dependency Links processing has been deprecated and will be "
"removed in a future release.",
RemovedInPip11Warning,
replacement=None,
gone_in="18.2",
issue=4187,
)
self.dependency_links.extend(links)
@ -908,166 +870,6 @@ class HTMLPage(object):
lambda match: '%%%2x' % ord(match.group(0)), url)
class Link(object):
def __init__(self, url, comes_from=None, requires_python=None):
"""
Object representing a parsed link from https://pypi.org/simple/*
url:
url of the resource pointed to (href of the link)
comes_from:
instance of HTMLPage where the link was found, or string.
requires_python:
String containing the `Requires-Python` metadata field, specified
in PEP 345. This may be specified by a data-requires-python
attribute in the HTML link tag, as described in PEP 503.
"""
# url can be a UNC windows share
if url.startswith('\\\\'):
url = path_to_url(url)
self.url = url
self.comes_from = comes_from
self.requires_python = requires_python if requires_python else None
def __str__(self):
if self.requires_python:
rp = ' (requires-python:%s)' % self.requires_python
else:
rp = ''
if self.comes_from:
return '%s (from %s)%s' % (self.url, self.comes_from, rp)
else:
return str(self.url)
def __repr__(self):
return '<Link %s>' % self
def __eq__(self, other):
if not isinstance(other, Link):
return NotImplemented
return self.url == other.url
def __ne__(self, other):
if not isinstance(other, Link):
return NotImplemented
return self.url != other.url
def __lt__(self, other):
if not isinstance(other, Link):
return NotImplemented
return self.url < other.url
def __le__(self, other):
if not isinstance(other, Link):
return NotImplemented
return self.url <= other.url
def __gt__(self, other):
if not isinstance(other, Link):
return NotImplemented
return self.url > other.url
def __ge__(self, other):
if not isinstance(other, Link):
return NotImplemented
return self.url >= other.url
def __hash__(self):
return hash(self.url)
@property
def filename(self):
_, netloc, path, _, _ = urllib_parse.urlsplit(self.url)
name = posixpath.basename(path.rstrip('/')) or netloc
name = urllib_parse.unquote(name)
assert name, ('URL %r produced no filename' % self.url)
return name
@property
def scheme(self):
return urllib_parse.urlsplit(self.url)[0]
@property
def netloc(self):
return urllib_parse.urlsplit(self.url)[1]
@property
def path(self):
return urllib_parse.unquote(urllib_parse.urlsplit(self.url)[2])
def splitext(self):
return splitext(posixpath.basename(self.path.rstrip('/')))
@property
def ext(self):
return self.splitext()[1]
@property
def url_without_fragment(self):
scheme, netloc, path, query, fragment = urllib_parse.urlsplit(self.url)
return urllib_parse.urlunsplit((scheme, netloc, path, query, None))
_egg_fragment_re = re.compile(r'[#&]egg=([^&]*)')
@property
def egg_fragment(self):
match = self._egg_fragment_re.search(self.url)
if not match:
return None
return match.group(1)
_subdirectory_fragment_re = re.compile(r'[#&]subdirectory=([^&]*)')
@property
def subdirectory_fragment(self):
match = self._subdirectory_fragment_re.search(self.url)
if not match:
return None
return match.group(1)
_hash_re = re.compile(
r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)'
)
@property
def hash(self):
match = self._hash_re.search(self.url)
if match:
return match.group(2)
return None
@property
def hash_name(self):
match = self._hash_re.search(self.url)
if match:
return match.group(1)
return None
@property
def show_url(self):
return posixpath.basename(self.url.split('#', 1)[0].split('?', 1)[0])
@property
def is_wheel(self):
return self.ext == wheel_ext
@property
def is_artifact(self):
"""
Determines if this points to an actual artifact (e.g. a tarball) or if
it points to an "abstract" thing like a path or a VCS location.
"""
from pip._internal.vcs import vcs
if self.scheme in vcs.all_schemes:
return False
return True
FormatControl = namedtuple('FormatControl', 'no_binary only_binary')
"""This object has two fields, no_binary and only_binary.

View File

@ -0,0 +1,23 @@
from pip._vendor.packaging.version import parse as parse_version
from pip._internal.utils.models import KeyBasedCompareMixin
class InstallationCandidate(KeyBasedCompareMixin):
"""Represents a potential "candidate" for installation.
"""
def __init__(self, project, version, location):
self.project = project
self.version = parse_version(version)
self.location = location
super(InstallationCandidate, self).__init__(
key=(self.project, self.version, self.location),
defining_class=InstallationCandidate
)
def __repr__(self):
return "<InstallationCandidate({!r}, {!r}, {!r})>".format(
self.project, self.version, self.location,
)
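Because the compare key includes the parsed version, candidates sort naturally
by version. A small sketch (locations are placeholders):

    from pip._internal.models.candidate import InstallationCandidate

    candidates = [
        InstallationCandidate("pip", "9.0.3", "https://example/a"),
        InstallationCandidate("pip", "18.0", "https://example/b"),
        InstallationCandidate("pip", "10.0.1", "https://example/c"),
    ]
    # parse_version gives real version ordering, not string ordering.
    print([str(c.version) for c in sorted(candidates)])
    # ['9.0.3', '10.0.1', '18.0']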

View File

@ -1,15 +1,29 @@
from pip._vendor.six.moves.urllib import parse as urllib_parse
class Index(object):
def __init__(self, url):
class PackageIndex(object):
"""Represents a Package Index and provides easier access to endpoints
"""
def __init__(self, url, file_storage_domain):
super(PackageIndex, self).__init__()
self.url = url
self.netloc = urllib_parse.urlsplit(url).netloc
self.simple_url = self.url_to_path('simple')
self.pypi_url = self.url_to_path('pypi')
self.simple_url = self._url_for_path('simple')
self.pypi_url = self._url_for_path('pypi')
def url_to_path(self, path):
# This is part of a temporary hack used to block installs of PyPI
# packages which depend on external urls; it is only necessary until
# PyPI can block such packages itself.
self.file_storage_domain = file_storage_domain
def _url_for_path(self, path):
return urllib_parse.urljoin(self.url, path)
PyPI = Index('https://pypi.org/')
PyPI = PackageIndex(
'https://pypi.org/', file_storage_domain='files.pythonhosted.org'
)
TestPyPI = PackageIndex(
'https://test.pypi.org/', file_storage_domain='test-files.pythonhosted.org'
)
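For instance, the ``PyPI`` object above yields (a quick sketch, using the
import path shown later in this diff):

    from pip._internal.models.index import PyPI

    print(PyPI.simple_url)           # https://pypi.org/simple
    print(PyPI.pypi_url)             # https://pypi.org/pypi
    print(PyPI.file_storage_domain)  # files.pythonhosted.org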

View File

@ -0,0 +1,141 @@
import posixpath
import re
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._internal.download import path_to_url
from pip._internal.utils.misc import splitext
from pip._internal.utils.models import KeyBasedCompareMixin
from pip._internal.wheel import wheel_ext
class Link(KeyBasedCompareMixin):
"""Represents a parsed link from a Package Index's simple URL
"""
def __init__(self, url, comes_from=None, requires_python=None):
"""
url:
url of the resource pointed to (href of the link)
comes_from:
instance of HTMLPage where the link was found, or string.
requires_python:
String containing the `Requires-Python` metadata field, specified
in PEP 345. This may be specified by a data-requires-python
attribute in the HTML link tag, as described in PEP 503.
"""
# url can be a UNC windows share
if url.startswith('\\\\'):
url = path_to_url(url)
self.url = url
self.comes_from = comes_from
self.requires_python = requires_python if requires_python else None
super(Link, self).__init__(
key=(self.url),
defining_class=Link
)
def __str__(self):
if self.requires_python:
rp = ' (requires-python:%s)' % self.requires_python
else:
rp = ''
if self.comes_from:
return '%s (from %s)%s' % (self.url, self.comes_from, rp)
else:
return str(self.url)
def __repr__(self):
return '<Link %s>' % self
@property
def filename(self):
_, netloc, path, _, _ = urllib_parse.urlsplit(self.url)
name = posixpath.basename(path.rstrip('/')) or netloc
name = urllib_parse.unquote(name)
assert name, ('URL %r produced no filename' % self.url)
return name
@property
def scheme(self):
return urllib_parse.urlsplit(self.url)[0]
@property
def netloc(self):
return urllib_parse.urlsplit(self.url)[1]
@property
def path(self):
return urllib_parse.unquote(urllib_parse.urlsplit(self.url)[2])
def splitext(self):
return splitext(posixpath.basename(self.path.rstrip('/')))
@property
def ext(self):
return self.splitext()[1]
@property
def url_without_fragment(self):
scheme, netloc, path, query, fragment = urllib_parse.urlsplit(self.url)
return urllib_parse.urlunsplit((scheme, netloc, path, query, None))
_egg_fragment_re = re.compile(r'[#&]egg=([^&]*)')
@property
def egg_fragment(self):
match = self._egg_fragment_re.search(self.url)
if not match:
return None
return match.group(1)
_subdirectory_fragment_re = re.compile(r'[#&]subdirectory=([^&]*)')
@property
def subdirectory_fragment(self):
match = self._subdirectory_fragment_re.search(self.url)
if not match:
return None
return match.group(1)
_hash_re = re.compile(
r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)'
)
@property
def hash(self):
match = self._hash_re.search(self.url)
if match:
return match.group(2)
return None
@property
def hash_name(self):
match = self._hash_re.search(self.url)
if match:
return match.group(1)
return None
@property
def show_url(self):
return posixpath.basename(self.url.split('#', 1)[0].split('?', 1)[0])
@property
def is_wheel(self):
return self.ext == wheel_ext
@property
def is_artifact(self):
"""
Determines if this points to an actual artifact (e.g. a tarball) or if
it points to an "abstract" thing like a path or a VCS location.
"""
from pip._internal.vcs import vcs
if self.scheme in vcs.all_schemes:
return False
return True
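A short sketch of what these properties yield for a typical index link (the
URL and hash value are illustrative):

    from pip._internal.models.link import Link

    link = Link(
        "https://files.pythonhosted.org/pkgs/pip-18.0-py2.py3-none-any.whl"
        "#sha256=abcdef0123456789"
    )
    print(link.filename)   # pip-18.0-py2.py3-none-any.whl
    print(link.ext)        # .whl
    print(link.is_wheel)   # True
    print(link.hash_name)  # sha256
    print(link.hash)       # abcdef0123456789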

View File

@ -6,7 +6,6 @@ from collections import namedtuple
from pip._vendor.packaging.utils import canonicalize_name
from pip._internal.operations.prepare import make_abstract_dist
from pip._internal.utils.misc import get_installed_distributions
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

View File

@ -4,7 +4,6 @@ import collections
import logging
import os
import re
import warnings
from pip._vendor import pkg_resources, six
from pip._vendor.packaging.utils import canonicalize_name
@ -13,7 +12,7 @@ from pip._vendor.pkg_resources import RequirementParseError
from pip._internal.exceptions import InstallationError
from pip._internal.req import InstallRequirement
from pip._internal.req.req_file import COMMENT_RE
from pip._internal.utils.deprecation import RemovedInPip11Warning
from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.misc import (
dist_is_editable, get_installed_distributions,
)
@ -216,10 +215,12 @@ class FrozenRequirement(object):
'for this package:'
)
else:
warnings.warn(
deprecated(
"SVN editable detection based on dependency links "
"will be dropped in the future.",
RemovedInPip11Warning,
replacement=None,
gone_in="18.2",
issue=4187,
)
comments.append(
'# Installing as editable to satisfy requirement %s:' %

View File

@ -141,11 +141,12 @@ class RequirementPreparer(object):
"""
def __init__(self, build_dir, download_dir, src_dir, wheel_download_dir,
progress_bar, build_isolation):
progress_bar, build_isolation, req_tracker):
super(RequirementPreparer, self).__init__()
self.src_dir = src_dir
self.build_dir = build_dir
self.req_tracker = req_tracker
# Where still packed archives should be written to. If None, they are
# not saved, and are deleted immediately after unpacking.
@ -293,7 +294,8 @@ class RequirementPreparer(object):
(req, exc, req.link)
)
abstract_dist = make_abstract_dist(req)
abstract_dist.prep_for_dist(finder, self.build_isolation)
with self.req_tracker.track(req):
abstract_dist.prep_for_dist(finder, self.build_isolation)
if self._download_should_save:
# Make a .zip of the source_dir we already created.
if req.link.scheme in vcs.all_schemes:
@ -319,7 +321,8 @@ class RequirementPreparer(object):
req.update_editable(not self._download_should_save)
abstract_dist = make_abstract_dist(req)
abstract_dist.prep_for_dist(finder, self.build_isolation)
with self.req_tracker.track(req):
abstract_dist.prep_for_dist(finder, self.build_isolation)
if self._download_should_save:
req.archive(self.download_dir)

View File

@ -48,7 +48,7 @@ def install_given_reqs(to_install, install_options, global_options=(),
*args,
**kwargs
)
except:
except Exception:
should_rollback = (
requirement.conflicts_with and
not requirement.install_succeeded

View File

@ -8,7 +8,6 @@ import shutil
import sys
import sysconfig
import traceback
import warnings
import zipfile
from distutils.util import change_root
from email.parser import FeedParser # type: ignore
@ -18,8 +17,8 @@ from pip._vendor.packaging import specifiers
from pip._vendor.packaging.markers import Marker
from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.packaging.version import parse as parse_version
from pip._vendor.packaging.version import Version
from pip._vendor.packaging.version import parse as parse_version
from pip._vendor.pkg_resources import RequirementParseError, parse_requirements
from pip._internal import wheel
@ -32,10 +31,9 @@ from pip._internal.exceptions import InstallationError
from pip._internal.locations import (
PIP_DELETE_MARKER_FILENAME, running_under_virtualenv,
)
from pip._internal.models.index import PyPI, TestPyPI
from pip._internal.models.link import Link
from pip._internal.req.req_uninstall import UninstallPathSet
from pip._internal.utils.deprecation import (
RemovedInPip11Warning, RemovedInPip12Warning,
)
from pip._internal.utils.hashes import Hashes
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import (
@ -137,8 +135,6 @@ class InstallRequirement(object):
@classmethod
def from_editable(cls, editable_req, comes_from=None, isolated=False,
options=None, wheel_cache=None, constraint=False):
from pip._internal.index import Link
name, url, extras_override = parse_editable(editable_req)
if url.startswith('file:'):
source_dir = url_to_path(url)
@ -169,11 +165,19 @@ class InstallRequirement(object):
req = Requirement(req)
except InvalidRequirement:
raise InstallationError("Invalid requirement: '%s'" % req)
if req.url:
domains_not_allowed = [
PyPI.file_storage_domain,
TestPyPI.file_storage_domain,
]
if req.url and comes_from.link.netloc in domains_not_allowed:
# Explicitly disallow pypi packages that depend on external urls
raise InstallationError(
"Direct url requirement (like %s) are not allowed for "
"dependencies" % req
"Packages installed from PyPI cannot depend on packages "
"which are not also hosted on PyPI.\n"
"%s depends on %s " % (comes_from.name, req)
)
return cls(req, comes_from, isolated=isolated, wheel_cache=wheel_cache)
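To see where ``req.url`` comes from, here is a sketch using the standalone
``packaging`` library as a stand-in for pip's vendored copy: a PEP 508
direct-URL requirement exposes its URL on the parsed object, which is what
the guard above inspects.

    from packaging.requirements import Requirement

    req = Requirement("example @ https://example.com/example-1.0.tar.gz")
    print(req.name)  # example
    print(req.url)   # https://example.com/example-1.0.tar.gz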
@classmethod
@ -568,44 +572,40 @@ class InstallRequirement(object):
specified as per PEP 518 within the package. If `pyproject.toml` is not
present, returns None to signify that it is not in use.
"""
# If pyproject.toml does not exist, don't do anything.
if not os.path.isfile(self.pyproject_toml):
return None
error_template = (
"{package} has a pyproject.toml file that does not comply "
"with PEP 518: {reason}"
)
with io.open(self.pyproject_toml, encoding="utf-8") as f:
pp_toml = pytoml.load(f)
# Extract the build requirements
requires = pp_toml.get("build-system", {}).get("requires", None)
# If there is no build-system table, just use setuptools and wheel.
if "build-system" not in pp_toml:
return ["setuptools", "wheel"]
template = (
"%s does not comply with PEP 518 since pyproject.toml "
"does not contain a valid '[build-system].requires' key: %s"
)
if requires is None:
logging.warn(template, self, "it is missing.")
warnings.warn(
"Future versions of pip will reject packages with "
"pyproject.toml files that do not comply with PEP 518.",
RemovedInPip12Warning,
# Specifying the build-system table but not the requires key is invalid
build_system = pp_toml["build-system"]
if "requires" not in build_system:
raise InstallationError(
error_template.format(package=self, reason=(
"it has a 'build-system' table but not "
"'build-system.requires' which is mandatory in the table"
))
)
# NOTE: Currently allowing projects to skip this key so that they
# can transition to a PEP 518 compliant pyproject.toml or
# push to update the PEP.
# Come pip 19.0, bring this to compliance with PEP 518.
return None
else:
# Error out if it's not a list of strings
is_list_of_str = isinstance(requires, list) and all(
isinstance(req, six.string_types) for req in requires
)
if not is_list_of_str:
raise InstallationError(
template % (self, "it is not a list of strings.")
)
# Error out if it's not a list of strings
requires = build_system["requires"]
if not _is_list_of_str(requires):
raise InstallationError(error_template.format(
package=self,
reason="'build-system.requires' is not a list of strings.",
))
# If control flow reaches here, we're good to go.
return requires
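As a sketch of what the validation above accepts and rejects (plain dicts
standing in for parsed TOML, mirroring the ``_is_list_of_str`` helper defined
later in this file; the standalone ``six`` package matches pip's vendored
copy):

    import six

    def _is_list_of_str(obj):
        return (
            isinstance(obj, list) and
            all(isinstance(item, six.string_types) for item in obj)
        )

    # Compliant: 'requires' is a list of strings.
    assert _is_list_of_str(["setuptools", "wheel"])
    # Rejected: a bare string (or any other non-list) fails the check.
    assert not _is_list_of_str("setuptools")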
def run_egg_info(self):
@ -1049,22 +1049,6 @@ class InstallRequirement(object):
return install_args
def _strip_postfix(req):
"""
Strip req postfix ( -dev, 0.2, etc )
"""
# FIXME: use package_to_requirement?
match = re.search(r'^(.*?)(?:-dev|-\d.*)$', req)
if match:
# Strip off -dev, -0.2, etc.
warnings.warn(
"#egg cleanup for editable urls will be dropped in the future",
RemovedInPip11Warning,
)
req = match.group(1)
return req
def parse_editable(editable_req):
"""Parses an editable requirement into:
- a requirement name
@ -1129,7 +1113,7 @@ def parse_editable(editable_req):
"Could not detect requirement name for '%s', please specify one "
"with #egg=your_package_name" % editable_req
)
return _strip_postfix(package_name), url, None
return package_name, url, None
def deduce_helpful_msg(req):
@ -1157,3 +1141,10 @@ def deduce_helpful_msg(req):
else:
msg += " File '%s' does not exist." % (req)
return msg
def _is_list_of_str(obj):
return (
isinstance(obj, list) and
all(isinstance(item, six.string_types) for item in obj)
)

View File

@ -0,0 +1,76 @@
from __future__ import absolute_import
import contextlib
import errno
import hashlib
import logging
import os
from pip._internal.utils.temp_dir import TempDirectory
logger = logging.getLogger(__name__)
class RequirementTracker(object):
def __init__(self):
self._root = os.environ.get('PIP_REQ_TRACKER')
if self._root is None:
self._temp_dir = TempDirectory(delete=False, kind='req-tracker')
self._temp_dir.create()
self._root = os.environ['PIP_REQ_TRACKER'] = self._temp_dir.path
logger.debug('Created requirements tracker %r', self._root)
else:
self._temp_dir = None
logger.debug('Re-using requirements tracker %r', self._root)
self._entries = set()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.cleanup()
def _entry_path(self, link):
hashed = hashlib.sha224(link.url_without_fragment.encode()).hexdigest()
return os.path.join(self._root, hashed)
def add(self, req):
link = req.link
info = str(req)
entry_path = self._entry_path(link)
try:
with open(entry_path) as fp:
# Error, there's already a build in progress.
raise LookupError('%s is already being built: %s'
% (link, fp.read()))
except IOError as e:
if e.errno != errno.ENOENT:
raise
assert req not in self._entries
with open(entry_path, 'w') as fp:
fp.write(info)
self._entries.add(req)
logger.debug('Added %s to build tracker %r', req, self._root)
def remove(self, req):
link = req.link
self._entries.remove(req)
os.unlink(self._entry_path(link))
logger.debug('Removed %s from build tracker %r', req, self._root)
def cleanup(self):
for req in set(self._entries):
self.remove(req)
remove = self._temp_dir is not None
if remove:
self._temp_dir.cleanup()
logger.debug('%s build tracker %r',
'Removed' if remove else 'Cleaned',
self._root)
@contextlib.contextmanager
def track(self, req):
self.add(req)
yield
self.remove(req)
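A minimal usage sketch (``req`` stands for a hypothetical InstallRequirement
with a ``link`` attribute, as in the call sites added elsewhere in this change
set). Because the tracker root is shared via the ``PIP_REQ_TRACKER``
environment variable, nested pip invocations re-use it and can detect
recursive builds of the same link:

    with RequirementTracker() as tracker:
        with tracker.track(req):
            # Build the requirement here. A concurrent or recursive build
            # of the same link would raise LookupError inside add().
            ...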

View File

@ -120,6 +120,8 @@ def compress_for_output_listing(paths):
folders.add(os.path.dirname(path))
files.add(path)
_normcased_files = set(map(os.path.normcase, files))
folders = compact(folders)
# This walks the tree using os.walk to not miss extra folders
@ -130,8 +132,9 @@ def compress_for_output_listing(paths):
if fname.endswith(".pyc"):
continue
file_ = os.path.normcase(os.path.join(dirpath, fname))
if os.path.isfile(file_) and file_ not in files:
file_ = os.path.join(dirpath, fname)
if (os.path.isfile(file_) and
os.path.normcase(file_) not in _normcased_files):
# We are skipping this file. Add it to the set.
will_skip.add(file_)
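The normalization matters because ``os.path.normcase`` is platform-dependent;
a quick illustration:

    import os.path

    # On Windows this lowercases (and normalizes separators); on POSIX it
    # returns the input unchanged, so both sides of a membership test must
    # be normalized consistently, as done above.
    print(os.path.normcase("C:\\Temp\\README.txt"))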

View File

@ -18,7 +18,6 @@ from pip._internal.exceptions import (
BestVersionAlreadyInstalled, DistributionNotFound, HashError, HashErrors,
UnsupportedPythonVersion,
)
from pip._internal.req.req_install import InstallRequirement
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import dist_in_usersite, ensure_dir

View File

@ -6,72 +6,84 @@ from __future__ import absolute_import
import logging
import warnings
from pip._vendor.packaging.version import parse
from pip import __version__ as current_version
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import Any # noqa: F401
from typing import Any, Optional # noqa: F401
class PipDeprecationWarning(Warning):
pass
class Pending(object):
pass
class RemovedInPip11Warning(PipDeprecationWarning):
pass
class RemovedInPip12Warning(PipDeprecationWarning, Pending):
pass
_original_showwarning = None # type: Any
# Warnings <-> Logging Integration
_warnings_showwarning = None # type: Any
def _showwarning(message, category, filename, lineno, file=None, line=None):
if file is not None:
if _warnings_showwarning is not None:
_warnings_showwarning(
if _original_showwarning is not None:
_original_showwarning(
message, category, filename, lineno, file, line,
)
elif issubclass(category, PipDeprecationWarning):
# We use a specially named logger which will handle all of the
# deprecation messages for pip.
logger = logging.getLogger("pip._internal.deprecations")
logger.warning(message)
else:
if issubclass(category, PipDeprecationWarning):
# We use a specially named logger which will handle all of the
# deprecation messages for pip.
logger = logging.getLogger("pip._internal.deprecations")
# This is purposely using the % formatter here instead of letting
# the logging module handle the interpolation. This is because we
# want it to appear as if someone typed this entire message out.
log_message = "DEPRECATION: %s" % message
# PipDeprecationWarnings that are Pending still have at least 2
# versions to go until they are removed so they can just be
# warnings. Otherwise, they will be removed in the very next
# version of pip. We want these to be more obvious so we use the
# ERROR logging level.
if issubclass(category, Pending):
logger.warning(log_message)
else:
logger.error(log_message)
else:
_warnings_showwarning(
message, category, filename, lineno, file, line,
)
_original_showwarning(
message, category, filename, lineno, file, line,
)
def install_warning_logger():
# Enable our Deprecation Warnings
warnings.simplefilter("default", PipDeprecationWarning, append=True)
global _warnings_showwarning
global _original_showwarning
if _warnings_showwarning is None:
_warnings_showwarning = warnings.showwarning
if _original_showwarning is None:
_original_showwarning = warnings.showwarning
warnings.showwarning = _showwarning
def deprecated(reason, replacement, gone_in, issue=None):
# type: (str, Optional[str], Optional[str], Optional[int]) -> None
"""Helper to deprecate existing functionality.
reason:
Textual reason shown to the user about why this functionality has
been deprecated.
replacement:
Textual suggestion shown to the user about what alternative
functionality they can use.
gone_in:
The version of pip in which this functionality should be removed.
Raises errors if pip's current version is greater than or equal to
this.
issue:
Issue number on the tracker that would serve as a useful place for
users to find related discussion and provide feedback.
Always pass replacement, gone_in and issue as keyword arguments for clarity
at the call site.
"""
# Construct a nice message.
# This is purposely eagerly formatted as we want it to appear as if someone
# typed this entire message out.
message = "DEPRECATION: " + reason
if replacement is not None:
message += " A possible replacement is {}.".format(replacement)
if issue is not None:
url = "https://github.com/pypa/pip/issues/" + str(issue)
message += " You can find discussion regarding this at {}.".format(url)
# Raise as an error if it has to be removed.
if gone_in is not None and parse(current_version) >= parse(gone_in):
raise PipDeprecationWarning(message)
warnings.warn(message, category=PipDeprecationWarning, stacklevel=2)
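Call sites pass everything except the reason as keyword arguments, as the
docstring asks; the PackageFinder change earlier in this diff is a
representative example:

    deprecated(
        "Dependency Links processing has been deprecated and will be "
        "removed in a future release.",
        replacement=None,
        gone_in="18.2",
        issue=4187,
    )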

View File

@ -130,3 +130,96 @@ class MaxLevelFilter(logging.Filter):
def filter(self, record):
return record.levelno < self.level
def setup_logging(verbosity, no_color, user_log_file):
"""Configures and sets up all of the logging
"""
# Determine the level to be logging at.
if verbosity >= 1:
level = "DEBUG"
elif verbosity == -1:
level = "WARNING"
elif verbosity == -2:
level = "ERROR"
elif verbosity <= -3:
level = "CRITICAL"
else:
level = "INFO"
# The "root" logger should match the "console" level *unless* we also need
# to log to a user log file.
include_user_log = user_log_file is not None
if include_user_log:
additional_log_file = user_log_file
root_level = "DEBUG"
else:
additional_log_file = "/dev/null"
root_level = level
# Disable any logging besides WARNING unless we have DEBUG level logging
# enabled for vendored libraries.
vendored_log_level = "WARNING" if level in ["INFO", "ERROR"] else "DEBUG"
# Shorthands for clarity
log_streams = {
"stdout": "ext://sys.stdout",
"stderr": "ext://sys.stderr",
}
handler_classes = {
"stream": "pip._internal.utils.logging.ColorizedStreamHandler",
"file": "pip._internal.utils.logging.BetterRotatingFileHandler",
}
logging.config.dictConfig({
"version": 1,
"disable_existing_loggers": False,
"filters": {
"exclude_warnings": {
"()": "pip._internal.utils.logging.MaxLevelFilter",
"level": logging.WARNING,
},
},
"formatters": {
"indent": {
"()": IndentingFormatter,
"format": "%(message)s",
},
},
"handlers": {
"console": {
"level": level,
"class": handler_classes["stream"],
"no_color": no_color,
"stream": log_streams["stdout"],
"filters": ["exclude_warnings"],
"formatter": "indent",
},
"console_errors": {
"level": "WARNING",
"class": handler_classes["stream"],
"no_color": no_color,
"stream": log_streams["stderr"],
"formatter": "indent",
},
"user_log": {
"level": "DEBUG",
"class": handler_classes["file"],
"filename": additional_log_file,
"delay": True,
"formatter": "indent",
},
},
"root": {
"level": root_level,
"handlers": ["console", "console_errors"] + (
["user_log"] if include_user_log else []
),
},
"loggers": {
"pip._vendor": {
"level": vendored_log_level
}
},
})
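With this helper in place, the basecommand change earlier in this diff
reduces its logging setup to a single call, roughly:

    setup_logging(
        verbosity=options.verbose - options.quiet,
        no_color=options.no_color,
        user_log_file=options.log,
    )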

View File

@ -0,0 +1,40 @@
"""Utilities for defining models
"""
import operator
class KeyBasedCompareMixin(object):
"""Provides comparision capabilities that is based on a key
"""
def __init__(self, key, defining_class):
self._compare_key = key
self._defining_class = defining_class
def __hash__(self):
return hash(self._compare_key)
def __lt__(self, other):
return self._compare(other, operator.__lt__)
def __le__(self, other):
return self._compare(other, operator.__le__)
def __gt__(self, other):
return self._compare(other, operator.__gt__)
def __ge__(self, other):
return self._compare(other, operator.__ge__)
def __eq__(self, other):
return self._compare(other, operator.__eq__)
def __ne__(self, other):
return self._compare(other, operator.__ne__)
def _compare(self, other, method):
if not isinstance(other, self._defining_class):
return NotImplemented
return method(self._compare_key, other._compare_key)
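A minimal sketch of a subclass (a hypothetical class, for illustration only):

    class Version(KeyBasedCompareMixin):
        def __init__(self, major, minor):
            super(Version, self).__init__(
                key=(major, minor), defining_class=Version,
            )

    # Comparisons delegate to the key tuples.
    assert Version(1, 2) < Version(1, 10)
    assert Version(1, 2) == Version(1, 2)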

View File

@ -58,14 +58,14 @@ class SelfCheckState(object):
separators=(",", ":"))
def pip_installed_by_pip():
"""Checks whether pip was installed by pip
def was_installed_by_pip(pkg):
"""Checks whether pkg was installed by pip
This is used to avoid displaying the upgrade message when pip is in fact
installed by a system package manager, such as dnf on Fedora.
"""
try:
dist = pkg_resources.get_distribution('pip')
dist = pkg_resources.get_distribution(pkg)
return (dist.has_metadata('INSTALLER') and
'pip' in dist.get_metadata_lines('INSTALLER'))
except pkg_resources.DistributionNotFound:
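The generalized helper keeps the original behaviour via
``was_installed_by_pip('pip')``, as used below, but can now answer the same
question for any distribution (a hypothetical call):

    # Returns True only when the distribution's INSTALLER metadata names pip.
    was_installed_by_pip('setuptools')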
@ -125,7 +125,7 @@ def pip_version_check(session, options):
# Determine if our pypi_version is older
if (pip_version < remote_version and
pip_version.base_version != remote_version.base_version and
pip_installed_by_pip()):
was_installed_by_pip('pip')):
# Advise "python -m pip" on Windows to avoid issues
# with overwriting pip.exe.
if WINDOWS:

View File

@ -200,12 +200,6 @@ class VersionControl(object):
drive, tail = os.path.splitdrive(repo)
return repo.startswith(os.path.sep) or drive
# See issue #1083 for why this method was introduced:
# https://github.com/pypa/pip/issues/1083
def translate_egg_surname(self, surname):
# For example, Django has branches of the form "stable/1.7.x".
return surname.replace('/', '_')
def export(self, location):
"""
Export the repository at the url to the destination location
@ -213,32 +207,59 @@ class VersionControl(object):
"""
raise NotImplementedError
def get_url_rev(self):
def get_netloc_and_auth(self, netloc):
"""
Returns the correct repository URL and revision by parsing the given
repository URL
Parse the repository URL's netloc, and return the new netloc to use
along with auth information.
This is mainly for the Subversion class to override, so that auth
information can be provided via the --username and --password options
instead of through the URL. For other subclasses like Git without
such an option, auth information must stay in the URL.
Returns: (netloc, (username, password)).
"""
return netloc, (None, None)
def get_url_rev_and_auth(self, url):
"""
Parse the repository URL to use, and return the URL, revision,
and auth info to use.
Returns: (url, rev, (username, password)).
"""
error_message = (
"Sorry, '%s' is a malformed VCS url. "
"The format is <vcs>+<protocol>://<url>, "
"e.g. svn+http://myrepo/svn/MyApp#egg=MyApp"
)
assert '+' in self.url, error_message % self.url
url = self.url.split('+', 1)[1]
assert '+' in url, error_message % url
url = url.split('+', 1)[1]
scheme, netloc, path, query, frag = urllib_parse.urlsplit(url)
netloc, user_pass = self.get_netloc_and_auth(netloc)
rev = None
if '@' in path:
path, rev = path.rsplit('@', 1)
url = urllib_parse.urlunsplit((scheme, netloc, path, query, ''))
return url, rev
return url, rev, user_pass
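A stdlib-only sketch of the parsing steps above (Python 3 ``urllib.parse``
standing in for the vendored ``six.moves`` import; the sample URL is
illustrative):

    from urllib.parse import urlsplit, urlunsplit

    url = "git+https://github.com/pypa/pip.git@18.0#egg=pip"
    assert '+' in url
    url = url.split('+', 1)[1]
    scheme, netloc, path, query, frag = urlsplit(url)
    rev = None
    if '@' in path:
        path, rev = path.rsplit('@', 1)
    url = urlunsplit((scheme, netloc, path, query, ''))
    print(url)  # https://github.com/pypa/pip.git
    print(rev)  # 18.0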
def get_info(self, location):
def make_rev_args(self, username, password):
"""
Returns (url, revision), where both are strings
Return the RevOptions "extra arguments" to use in obtain().
"""
assert not location.rstrip('/').endswith(self.dirname), \
'Bad directory: %s' % location
return self.get_url(location), self.get_revision(location)
return []
def get_url_rev_options(self, url):
"""
Return the URL and RevOptions object to use in obtain() and in
some cases export(), as a tuple (url, rev_options).
"""
url, rev, user_pass = self.get_url_rev_and_auth(url)
username, password = user_pass
extra_args = self.make_rev_args(username, password)
rev_options = self.make_rev_options(rev, extra_args=extra_args)
return url, rev_options
def normalize_url(self, url):
"""
@ -253,10 +274,14 @@ class VersionControl(object):
"""
return (self.normalize_url(url1) == self.normalize_url(url2))
def obtain(self, dest):
def fetch_new(self, dest, url, rev_options):
"""
Called when installing or updating an editable package, takes the
source path of the checkout.
Fetch a revision from a repository, in the case that this is the
first fetch from the repository.
Args:
dest: the directory to fetch the repository to.
rev_options: a RevOptions object.
"""
raise NotImplementedError
@ -288,21 +313,22 @@ class VersionControl(object):
"""
raise NotImplementedError
def check_destination(self, dest, url, rev_options):
def obtain(self, dest):
"""
Prepare a location to receive a checkout/clone.
Return True if the location is ready for (and requires) a
checkout/clone, False otherwise.
Install or update in editable mode the package represented by this
VersionControl object.
Args:
rev_options: a RevOptions object.
dest: the repository directory in which to install or update.
"""
url, rev_options = self.get_url_rev_options(self.url)
if not os.path.exists(dest):
return True
self.fetch_new(dest, url, rev_options)
return
rev_display = rev_options.to_display()
if os.path.exists(os.path.join(dest, self.dirname)):
if self.is_repository_directory(dest):
existing_url = self.get_url(dest)
if self.compare_urls(existing_url, url):
logger.debug(
@ -321,7 +347,7 @@ class VersionControl(object):
self.update(dest, rev_options)
else:
logger.info('Skipping because already up-to-date.')
return False
return
logger.warning(
'%s %s in %s exists with URL %s',
@ -348,7 +374,25 @@ class VersionControl(object):
)
response = ask_path_exists('What to do? %s' % prompt[0], prompt[1])
checkout = False
if response == 'a':
sys.exit(-1)
if response == 'w':
logger.warning('Deleting %s', display_path(dest))
rmtree(dest)
self.fetch_new(dest, url, rev_options)
return
if response == 'b':
dest_dir = backup_dir(dest)
logger.warning(
'Backing up %s to %s', display_path(dest), dest_dir,
)
shutil.move(dest, dest_dir)
self.fetch_new(dest, url, rev_options)
return
# Do nothing if the response is "i".
if response == 's':
logger.info(
'Switching %s %s to %s%s',
@ -358,24 +402,6 @@ class VersionControl(object):
rev_display,
)
self.switch(dest, url, rev_options)
elif response == 'i':
# do nothing
pass
elif response == 'w':
logger.warning('Deleting %s', display_path(dest))
rmtree(dest)
checkout = True
elif response == 'b':
dest_dir = backup_dir(dest)
logger.warning(
'Backing up %s to %s', display_path(dest), dest_dir,
)
shutil.move(dest, dest_dir)
checkout = True
elif response == 'a':
sys.exit(-1)
return checkout
def unpack(self, location):
"""
@ -398,7 +424,6 @@ class VersionControl(object):
def get_url(self, location):
"""
Return the url used at location
Used in get_info or check_destination
"""
raise NotImplementedError
@ -435,17 +460,26 @@ class VersionControl(object):
else:
raise # re-raise exception if a different error occurred
@classmethod
def is_repository_directory(cls, path):
"""
Return whether a directory path is a repository directory.
"""
logger.debug('Checking in %s for %s (%s)...',
path, cls.dirname, cls.name)
return os.path.exists(os.path.join(path, cls.dirname))
@classmethod
def controls_location(cls, location):
"""
Check if a location is controlled by the vcs.
It is meant to be overridden to implement smarter detection
mechanisms for specific vcs.
This can do more than is_repository_directory() alone. For example,
the Git override checks that Git is actually available.
"""
logger.debug('Checking in %s for %s (%s)...',
location, cls.dirname, cls.name)
path = os.path.join(location, cls.dirname)
return os.path.exists(path)
return cls.is_repository_directory(location)
def get_src_requirement(dist, location):

View File

@ -48,6 +48,17 @@ class Bazaar(VersionControl):
cwd=temp_dir.path, show_stdout=False,
)
def fetch_new(self, dest, url, rev_options):
rev_display = rev_options.to_display()
logger.info(
'Checking out %s%s to %s',
url,
rev_display,
display_path(dest),
)
cmd_args = ['branch', '-q'] + rev_options.to_args() + [url, dest]
self.run_command(cmd_args)
def switch(self, dest, url, rev_options):
self.run_command(['switch', url], cwd=dest)
@ -55,26 +66,12 @@ class Bazaar(VersionControl):
cmd_args = ['pull', '-q'] + rev_options.to_args()
self.run_command(cmd_args, cwd=dest)
def obtain(self, dest):
url, rev = self.get_url_rev()
rev_options = self.make_rev_options(rev)
if self.check_destination(dest, url, rev_options):
rev_display = rev_options.to_display()
logger.info(
'Checking out %s%s to %s',
url,
rev_display,
display_path(dest),
)
cmd_args = ['branch', '-q'] + rev_options.to_args() + [url, dest]
self.run_command(cmd_args)
def get_url_rev(self):
def get_url_rev_and_auth(self, url):
# hotfix the URL scheme after removing bzr+ from bzr+ssh://; re-add it
url, rev = super(Bazaar, self).get_url_rev()
url, rev, user_pass = super(Bazaar, self).get_url_rev_and_auth(url)
if url.startswith('ssh://'):
url = 'bzr+' + url
return url, rev
return url, rev, user_pass
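The scheme "hotfix" can be traced with plain strings; a worked illustration with a made-up URL:

# The base class strips the 'bzr+' vcs prefix while parsing, so the
# override re-adds it for ssh URLs:
original = 'bzr+ssh://bzr.example.com/repo/trunk'
parsed = original.replace('bzr+', '', 1)   # stand-in for the base class
if parsed.startswith('ssh://'):
    parsed = 'bzr+' + parsed
assert parsed == original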
def get_url(self, location):
urls = self.run_command(['info'], show_stdout=False, cwd=location)

View File

@ -155,6 +155,33 @@ class Git(VersionControl):
return self.get_revision(dest) == name
def fetch_new(self, dest, url, rev_options):
rev_display = rev_options.to_display()
logger.info(
'Cloning %s%s to %s', url, rev_display, display_path(dest),
)
self.run_command(['clone', '-q', url, dest])
if rev_options.rev:
# Then a specific revision was requested.
rev_options = self.check_rev_options(dest, rev_options)
# Only do a checkout if the current commit id doesn't match
# the requested revision.
if not self.is_commit_id_equal(dest, rev_options.rev):
rev = rev_options.rev
# Only fetch the revision if it's a ref
if rev.startswith('refs/'):
self.run_command(
['fetch', '-q', url] + rev_options.to_args(),
cwd=dest,
)
# Change the revision to the SHA of the ref we fetched
rev = 'FETCH_HEAD'
self.run_command(['checkout', '-q', rev], cwd=dest)
#: repo may contain submodules
self.update_submodules(dest)
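A stand-alone equivalent of the ref-handling path above, written with plain subprocess calls (assumes `git` on PATH; submodule handling omitted):

import subprocess

def clone_at_ref(url, dest, ref):
    subprocess.check_call(['git', 'clone', '-q', url, dest])
    if ref.startswith('refs/'):
        # A bare ref may not be checkout-able right after the clone, so
        # fetch it explicitly; FETCH_HEAD then names the commit the ref
        # resolved to.
        subprocess.check_call(['git', 'fetch', '-q', url, ref], cwd=dest)
        ref = 'FETCH_HEAD'
    subprocess.check_call(['git', 'checkout', '-q', ref], cwd=dest)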
def switch(self, dest, url, rev_options):
self.run_command(['config', 'remote.origin.url', url], cwd=dest)
cmd_args = ['checkout', '-q'] + rev_options.to_args()
@ -176,35 +203,6 @@ class Git(VersionControl):
#: update submodules
self.update_submodules(dest)
def obtain(self, dest):
url, rev = self.get_url_rev()
rev_options = self.make_rev_options(rev)
if self.check_destination(dest, url, rev_options):
rev_display = rev_options.to_display()
logger.info(
'Cloning %s%s to %s', url, rev_display, display_path(dest),
)
self.run_command(['clone', '-q', url, dest])
if rev:
rev_options = self.check_rev_options(dest, rev_options)
# Only do a checkout if the current commit id doesn't match
# the requested revision.
if not self.is_commit_id_equal(dest, rev_options.rev):
rev = rev_options.rev
# Only fetch the revision if it's a ref
if rev.startswith('refs/'):
self.run_command(
['fetch', '-q', url] + rev_options.to_args(),
cwd=dest,
)
# Change the revision to the SHA of the ref we fetched
rev = 'FETCH_HEAD'
self.run_command(['checkout', '-q', rev], cwd=dest)
#: repo may contain submodules
self.update_submodules(dest)
def get_url(self, location):
"""Return URL of the first remote encountered."""
remotes = self.run_command(
@ -267,22 +265,22 @@ class Git(VersionControl):
req += '&subdirectory=' + subdirectory
return req
def get_url_rev(self):
def get_url_rev_and_auth(self, url):
"""
Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
That's required because although they use SSH they sometimes don't
work with an ssh:// scheme (e.g. GitHub). But we need a scheme for
parsing. Hence we remove it again afterwards and return it as a stub.
"""
if '://' not in self.url:
assert 'file:' not in self.url
self.url = self.url.replace('git+', 'git+ssh://')
url, rev = super(Git, self).get_url_rev()
if '://' not in url:
assert 'file:' not in url
url = url.replace('git+', 'git+ssh://')
url, rev, user_pass = super(Git, self).get_url_rev_and_auth(url)
url = url.replace('ssh://', '')
else:
url, rev = super(Git, self).get_url_rev()
url, rev, user_pass = super(Git, self).get_url_rev_and_auth(url)
return url, rev
return url, rev, user_pass
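A worked round-trip of the stub-URL handling, with an illustrative GitHub-style URL:

stub = 'git+git@github.com:user/repo.git'      # no '://': unparseable
parsable = stub.replace('git+', 'git+ssh://')
# -> 'git+ssh://git@github.com:user/repo.git'; urlsplit() now works.
# The base class strips the 'git+' vcs prefix while parsing:
after_super = parsable.replace('git+', '', 1)
# -> 'ssh://git@github.com:user/repo.git'
restored = after_super.replace('ssh://', '')
assert restored == 'git@github.com:user/repo.git'  # git's own stub form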
def update_submodules(self, location):
if not os.path.exists(os.path.join(location, '.gitmodules')):

View File

@ -31,6 +31,18 @@ class Mercurial(VersionControl):
['archive', location], show_stdout=False, cwd=temp_dir.path
)
def fetch_new(self, dest, url, rev_options):
rev_display = rev_options.to_display()
logger.info(
'Cloning hg %s%s to %s',
url,
rev_display,
display_path(dest),
)
self.run_command(['clone', '--noupdate', '-q', url, dest])
cmd_args = ['update', '-q'] + rev_options.to_args()
self.run_command(cmd_args, cwd=dest)
def switch(self, dest, url, rev_options):
repo_config = os.path.join(dest, self.dirname, 'hgrc')
config = configparser.SafeConfigParser()
@ -52,21 +64,6 @@ class Mercurial(VersionControl):
cmd_args = ['update', '-q'] + rev_options.to_args()
self.run_command(cmd_args, cwd=dest)
def obtain(self, dest):
url, rev = self.get_url_rev()
rev_options = self.make_rev_options(rev)
if self.check_destination(dest, url, rev_options):
rev_display = rev_options.to_display()
logger.info(
'Cloning hg %s%s to %s',
url,
rev_display,
display_path(dest),
)
self.run_command(['clone', '--noupdate', '-q', url, dest])
cmd_args = ['update', '-q'] + rev_options.to_args()
self.run_command(cmd_args, cwd=dest)
def get_url(self, location):
url = self.run_command(
['showconfig', 'paths.default'],

View File

@ -4,17 +4,13 @@ import logging
import os
import re
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._internal.index import Link
from pip._internal.models.link import Link
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import display_path, remove_auth_from_url, rmtree
from pip._internal.utils.misc import display_path, rmtree
from pip._internal.vcs import VersionControl, vcs
_svn_xml_url_re = re.compile('url="([^"]+)"')
_svn_rev_re = re.compile(r'committed-rev="(\d+)"')
_svn_url_re = re.compile(r'URL: (.+)')
_svn_revision_re = re.compile(r'Revision: (.+)')
_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"')
_svn_info_xml_url_re = re.compile(r'<url>(.*)</url>')
@ -31,39 +27,10 @@ class Subversion(VersionControl):
def get_base_rev_args(self, rev):
return ['-r', rev]
def get_info(self, location):
"""Returns (url, revision), where both are strings"""
assert not location.rstrip('/').endswith(self.dirname), \
'Bad directory: %s' % location
output = self.run_command(
['info', location],
show_stdout=False,
extra_environ={'LANG': 'C'},
)
match = _svn_url_re.search(output)
if not match:
logger.warning(
'Cannot determine URL of svn checkout %s',
display_path(location),
)
logger.debug('Output that cannot be parsed: \n%s', output)
return None, None
url = match.group(1).strip()
match = _svn_revision_re.search(output)
if not match:
logger.warning(
'Cannot determine revision of svn checkout %s',
display_path(location),
)
logger.debug('Output that cannot be parsed: \n%s', output)
return url, None
return url, match.group(1)
def export(self, location):
"""Export the svn repository at the url to the destination location"""
url, rev = self.get_url_rev()
rev_options = get_rev_options(self, url, rev)
url = remove_auth_from_url(url)
url, rev_options = self.get_url_rev_options(self.url)
logger.info('Exporting svn repository %s to %s', url, location)
with indent_log():
if os.path.exists(location):
@ -73,6 +40,17 @@ class Subversion(VersionControl):
cmd_args = ['export'] + rev_options.to_args() + [url, location]
self.run_command(cmd_args, show_stdout=False)
def fetch_new(self, dest, url, rev_options):
rev_display = rev_options.to_display()
logger.info(
'Checking out %s%s to %s',
url,
rev_display,
display_path(dest),
)
cmd_args = ['checkout', '-q'] + rev_options.to_args() + [url, dest]
self.run_command(cmd_args)
def switch(self, dest, url, rev_options):
cmd_args = ['switch'] + rev_options.to_args() + [url, dest]
self.run_command(cmd_args)
@ -81,21 +59,6 @@ class Subversion(VersionControl):
cmd_args = ['update'] + rev_options.to_args() + [dest]
self.run_command(cmd_args)
def obtain(self, dest):
url, rev = self.get_url_rev()
rev_options = get_rev_options(self, url, rev)
url = remove_auth_from_url(url)
if self.check_destination(dest, url, rev_options):
rev_display = rev_options.to_display()
logger.info(
'Checking out %s%s to %s',
url,
rev_display,
display_path(dest),
)
cmd_args = ['checkout', '-q'] + rev_options.to_args() + [url, dest]
self.run_command(cmd_args)
def get_location(self, dist, dependency_links):
for url in dependency_links:
egg_fragment = Link(url).egg_fragment
@ -137,12 +100,45 @@ class Subversion(VersionControl):
revision = max(revision, localrev)
return revision
def get_url_rev(self):
def get_netloc_and_auth(self, netloc):
"""
Parse out and remove the auth information from the netloc.
This allows the auth information to be provided via the --username
and --password options instead of via the URL.
"""
if '@' not in netloc:
return netloc, (None, None)
# Split from the right because that's how urllib.parse.urlsplit()
# behaves if more than one @ is present (by checking the password
# attribute of urlsplit()'s return value).
auth, netloc = netloc.rsplit('@', 1)
if ':' in auth:
# Split from the left because that's how urllib.parse.urlsplit()
# behaves if more than one : is present (again by checking the
# password attribute of the return value)
user_pass = tuple(auth.split(':', 1))
else:
user_pass = auth, None
return netloc, user_pass
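A worked example of the splitting rules the comments describe, with deliberately pathological credentials:

netloc = 'user:pa:ss@word@svn.example.com'
auth, host = netloc.rsplit('@', 1)     # rightmost '@', like urlsplit()
assert host == 'svn.example.com'
user, password = auth.split(':', 1)    # leftmost ':', like urlsplit()
assert (user, password) == ('user', 'pa:ss@word')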
def get_url_rev_and_auth(self, url):
# hotfix the URL scheme after removing svn+ from svn+ssh://; re-add it
url, rev = super(Subversion, self).get_url_rev()
url, rev, user_pass = super(Subversion, self).get_url_rev_and_auth(url)
if url.startswith('ssh://'):
url = 'svn+' + url
return url, rev
return url, rev, user_pass
def make_rev_args(self, username, password):
extra_args = []
if username:
extra_args += ['--username', username]
if password:
extra_args += ['--password', password]
return extra_args
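End to end, credentials stripped from the URL come back as command-line options; with illustrative values:

user, password = 'alice', 's3cret'   # as get_netloc_and_auth() would return
extra_args = []
if user:
    extra_args += ['--username', user]
if password:
    extra_args += ['--password', password]
assert extra_args == ['--username', 'alice', '--password', 's3cret']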
def get_url(self, location):
# In cases where the source is in a subdirectory, not alongside
@ -222,32 +218,4 @@ class Subversion(VersionControl):
return False
def get_rev_options(vcs, url, rev):
"""
Return a RevOptions object.
"""
r = urllib_parse.urlsplit(url)
if hasattr(r, 'username'):
# >= Python-2.5
username, password = r.username, r.password
else:
netloc = r[1]
if '@' in netloc:
auth = netloc.split('@')[0]
if ':' in auth:
username, password = auth.split(':', 1)
else:
username, password = auth, None
else:
username, password = None, None
extra_args = []
if username:
extra_args += ['--username', username]
if password:
extra_args += ['--password', password]
return vcs.make_rev_options(rev, extra_args=extra_args)
vcs.register(Subversion)

View File

@ -163,7 +163,7 @@ def message_about_scripts_not_on_PATH(scripts):
# We don't want to warn for directories that are on PATH.
not_warn_dirs = [
os.path.normcase(i).rstrip(os.sep) for i in
os.environ["PATH"].split(os.pathsep)
os.environ.get("PATH", "").split(os.pathsep)
]
# If an executable sits with sys.executable, we don't warn for it.
# This covers the case of venv invocations without activating the venv.
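The switch to `os.environ.get` guards against an unset PATH; a quick demonstration of the failure mode it avoids:

import os

env = {}                         # simulate an environment without PATH
dirs = [
    os.path.normcase(p).rstrip(os.sep)
    for p in env.get("PATH", "").split(os.pathsep)
]
assert dirs == ['']              # degraded result instead of a KeyError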
@ -506,8 +506,8 @@ if __name__ == '__main__':
row[1], row[2] = rehash(row[0])
writer.writerow(row)
for f in generated:
h, l = rehash(f)
writer.writerow((normpath(f, lib_dir), h, l))
digest, length = rehash(f)
writer.writerow((normpath(f, lib_dir), digest, length))
for f in installed:
writer.writerow((installed[f], '', ''))
shutil.move(temp_record, record)
@ -528,7 +528,7 @@ def wheel_version(source_dir):
version = wheel_data['Wheel-Version'].strip()
version = tuple(map(int, version.split('.')))
return version
except:
except Exception:
return False
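The bare-`except` fixes throughout this file matter because `except:` also traps `BaseException` subclasses such as `KeyboardInterrupt` and `SystemExit`; `except Exception` lets them propagate. A quick self-contained demonstration:

def swallow(exc):
    try:
        raise exc
    except Exception:
        return 'caught'

assert swallow(ValueError()) == 'caught'
try:
    swallow(KeyboardInterrupt())
except KeyboardInterrupt:
    pass  # escaped the handler, as intended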
@ -653,7 +653,7 @@ class WheelBuilder(object):
)
logger.info('Stored in directory: %s', output_dir)
return wheel_path
except:
except Exception:
pass
# Ignore return, we can't do anything else useful.
self._clean_one(req)
@ -685,7 +685,7 @@ class WheelBuilder(object):
call_subprocess(wheel_args, cwd=req.setup_py_dir,
show_stdout=False, spinner=spinner)
return True
except:
except Exception:
spinner.finish("error")
logger.error('Failed building wheel for %s', req.name)
return False
@ -698,7 +698,7 @@ class WheelBuilder(object):
try:
call_subprocess(clean_args, cwd=req.source_dir, show_stdout=False)
return True
except:
except Exception:
logger.error('Failed cleaning build dir for %s', req.name)
return False
@ -710,6 +710,7 @@ class WheelBuilder(object):
:return: True if all the wheels built correctly.
"""
from pip._internal import index
from pip._internal.models.link import Link
building_is_possible = self._wheel_dir or (
autobuilding and self.wheel_cache.cache_dir
@ -802,7 +803,7 @@ class WheelBuilder(object):
self.preparer.build_dir
)
# Update the link for this.
req.link = index.Link(path_to_url(wheel_file))
req.link = Link(path_to_url(wheel_file))
assert req.link.is_wheel
# extract the wheel into the dir
unpack_url(

View File

@ -22,9 +22,9 @@ Policy
Rationale
---------
Historically pip has not had any dependencies except for setuptools itself,
Historically pip has not had any dependencies except for ``setuptools`` itself,
choosing instead to implement any functionality it needed to prevent needing
a dependency. However, starting with pip 1.5 we began to replace code that was
a dependency. However, starting with pip 1.5, we began to replace code that was
implemented inside of pip with reusable libraries from PyPI. This brought the
typical benefits of reusing libraries instead of reinventing the wheel, such as
higher-quality, more battle-tested code, centralization of bug fixes
@ -43,7 +43,7 @@ way (via ``install_requires``) for pip. These issues are:
* **Making other libraries uninstallable.** One of pip's current dependencies is
the ``requests`` library, for which pip requires a fairly recent version to run.
If pip dependended on ``requests`` in the traditional manner, then we'd either
If pip depended on ``requests`` in the traditional manner, then we'd either
have to maintain compatibility with every ``requests`` version that has ever
existed (and ever will), OR allow pip to render certain versions of ``requests``
uninstallable. (The second issue, although technically true for any Python
@ -117,7 +117,7 @@ Debundling
As mentioned in the rationale, we, the pip team, would prefer it if pip was not
debundled (other than optionally ``pip/_vendor/requests/cacert.pem``) and that
pip was left intact. However, if you insist on doing so, we have a
semi-supported method that we do test in our CI, but requires a bit of
semi-supported method (that we don't test in our CI) and requires a bit of
extra work on your end in order to solve the problems described above.
1. Delete everything in ``pip/_vendor/`` **except** for
@ -131,6 +131,14 @@ extra work on your end in order to solve the problems described above.
3. Modify ``pip/_vendor/__init__.py`` so that the ``DEBUNDLED`` variable is
``True`` (a sketch of this edit follows the list).
4. *(Optional)* If you've placed the wheels in a location other than
4. Upon installation, the ``INSTALLER`` file in pip's own ``dist-info``
directory should be set to something other than ``pip``, so that pip
can detect that it wasn't installed using itself.
5. *(optional)* If you've placed the wheels in a location other than
``pip/_vendor/``, then modify ``pip/_vendor/__init__.py`` so that the
``WHEEL_DIR`` variable points to the location you've placed them.
6. *(optional)* Update the ``pip_version_check`` logic to use the
appropriate logic for determining the latest available version of pip and
prompt the user with the correct upgrade message.
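Steps 3 and 5 amount to edits along these lines in ``pip/_vendor/__init__.py`` (a sketch; the real file also contains the ``vendored()`` machinery):

# pip/_vendor/__init__.py (sketch)
import os.path

DEBUNDLED = True   # step 3: import vendored dependencies from wheels

# step 5 (optional): where the wheels live, if not pip/_vendor/
WHEEL_DIR = os.path.abspath(os.path.dirname(__file__))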

View File

@ -107,3 +107,4 @@ if DEBUNDLED:
vendored("requests.packages.urllib3.util.ssl_")
vendored("requests.packages.urllib3.util.timeout")
vendored("requests.packages.urllib3.util.url")
vendored("urllib3")

View File

@ -2,9 +2,9 @@
Make it easy to import from cachecontrol without long namespaces.
"""
__author__ = 'Eric Larson'
__email__ = 'eric@ionrock.org'
__version__ = '0.12.4'
__author__ = "Eric Larson"
__email__ = "eric@ionrock.org"
__version__ = "0.12.5"
from .wrapper import CacheControl
from .adapter import CacheControlAdapter

View File

@ -17,14 +17,11 @@ def setup_logging():
def get_session():
adapter = CacheControlAdapter(
DictCache(),
cache_etags=True,
serializer=None,
heuristic=None,
DictCache(), cache_etags=True, serializer=None, heuristic=None
)
sess = requests.Session()
sess.mount('http://', adapter)
sess.mount('https://', adapter)
sess.mount("http://", adapter)
sess.mount("https://", adapter)
sess.cache_controller = adapter.controller
return sess
@ -32,7 +29,7 @@ def get_session():
def get_args():
parser = ArgumentParser()
parser.add_argument('url', help='The URL to try and cache')
parser.add_argument("url", help="The URL to try and cache")
return parser.parse_args()
@ -51,10 +48,10 @@ def main(args=None):
# Now try to get it
if sess.cache_controller.cached_request(resp.request):
print('Cached!')
print("Cached!")
else:
print('Not cached :(')
print("Not cached :(")
if __name__ == '__main__':
if __name__ == "__main__":
main()

View File

@ -10,25 +10,27 @@ from .filewrapper import CallbackFileWrapper
class CacheControlAdapter(HTTPAdapter):
invalidating_methods = set(['PUT', 'DELETE'])
invalidating_methods = {"PUT", "DELETE"}
def __init__(self, cache=None,
cache_etags=True,
controller_class=None,
serializer=None,
heuristic=None,
cacheable_methods=None,
*args, **kw):
def __init__(
self,
cache=None,
cache_etags=True,
controller_class=None,
serializer=None,
heuristic=None,
cacheable_methods=None,
*args,
**kw
):
super(CacheControlAdapter, self).__init__(*args, **kw)
self.cache = cache or DictCache()
self.heuristic = heuristic
self.cacheable_methods = cacheable_methods or ('GET',)
self.cacheable_methods = cacheable_methods or ("GET",)
controller_factory = controller_class or CacheController
self.controller = controller_factory(
self.cache,
cache_etags=cache_etags,
serializer=serializer,
self.cache, cache_etags=cache_etags, serializer=serializer
)
def send(self, request, cacheable_methods=None, **kw):
@ -43,20 +45,18 @@ class CacheControlAdapter(HTTPAdapter):
except zlib.error:
cached_response = None
if cached_response:
return self.build_response(request, cached_response,
from_cache=True)
return self.build_response(request, cached_response, from_cache=True)
# check for etags and add headers if appropriate
request.headers.update(
self.controller.conditional_headers(request)
)
request.headers.update(self.controller.conditional_headers(request))
resp = super(CacheControlAdapter, self).send(request, **kw)
return resp
def build_response(self, request, response, from_cache=False,
cacheable_methods=None):
def build_response(
self, request, response, from_cache=False, cacheable_methods=None
):
"""
Build a response by making a request or using the cache.
@ -101,10 +101,8 @@ class CacheControlAdapter(HTTPAdapter):
response._fp = CallbackFileWrapper(
response._fp,
functools.partial(
self.controller.cache_response,
request,
response,
)
self.controller.cache_response, request, response
),
)
if response.chunked:
super_update_chunk_length = response._update_chunk_length
@ -113,11 +111,12 @@ class CacheControlAdapter(HTTPAdapter):
super_update_chunk_length()
if self.chunk_left == 0:
self._fp._close()
response._update_chunk_length = types.MethodType(_update_chunk_length, response)
resp = super(CacheControlAdapter, self).build_response(
request, response
)
response._update_chunk_length = types.MethodType(
_update_chunk_length, response
)
resp = super(CacheControlAdapter, self).build_response(request, response)
# See if we should invalidate the cache.
if request.method in self.invalidating_methods and resp.ok:

View File

@ -8,13 +8,13 @@ from threading import Lock
class BaseCache(object):
def get(self, key):
raise NotImplemented()
raise NotImplementedError()
def set(self, key, value):
raise NotImplemented()
raise NotImplementedError()
def delete(self, key):
raise NotImplemented()
raise NotImplementedError()
def close(self):
pass

View File

@ -9,7 +9,7 @@ try:
FileNotFoundError
except NameError:
# py2.X
FileNotFoundError = OSError
FileNotFoundError = (IOError, OSError)
def _secure_open_write(filename, fmode):
@ -46,6 +46,7 @@ def _secure_open_write(filename, fmode):
fd = os.open(filename, flags, fmode)
try:
return os.fdopen(fd, "wb")
except:
# An error occurred wrapping our FD in a file object
os.close(fd)
@ -53,8 +54,16 @@ def _secure_open_write(filename, fmode):
class FileCache(BaseCache):
def __init__(self, directory, forever=False, filemode=0o0600,
dirmode=0o0700, use_dir_lock=None, lock_class=None):
def __init__(
self,
directory,
forever=False,
filemode=0o0600,
dirmode=0o0700,
use_dir_lock=None,
lock_class=None,
):
if use_dir_lock is not None and lock_class is not None:
raise ValueError("Cannot use use_dir_lock and lock_class together")
@ -63,12 +72,15 @@ class FileCache(BaseCache):
from pip._vendor.lockfile import LockFile
from pip._vendor.lockfile.mkdirlockfile import MkdirLockFile
except ImportError:
notice = dedent("""
notice = dedent(
"""
NOTE: In order to use the FileCache you must have
lockfile installed. You can install it via pip:
pip install lockfile
""")
"""
)
raise ImportError(notice)
else:
if use_dir_lock:
lock_class = MkdirLockFile
@ -95,11 +107,12 @@ class FileCache(BaseCache):
def get(self, key):
name = self._fn(key)
if not os.path.exists(name):
return None
try:
with open(name, "rb") as fh:
return fh.read()
with open(name, 'rb') as fh:
return fh.read()
except FileNotFoundError:
return None
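The rewrite above trades LBYL for EAFP: `os.path.exists()` followed by `open()` is racy, since the file can vanish between the two calls, while catching the error is atomic. A self-contained sketch (on Python 2 the shim earlier in this file makes `FileNotFoundError` an `(IOError, OSError)` tuple, which `except` accepts like a single class):

def read_if_present(name):
    try:
        with open(name, 'rb') as fh:
            return fh.read()
    except FileNotFoundError:
        return None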
def set(self, key, value):
name = self._fn(key)

View File

@ -4,16 +4,6 @@ from datetime import datetime
from pip._vendor.cachecontrol.cache import BaseCache
def total_seconds(td):
"""Python 2.6 compatability"""
if hasattr(td, 'total_seconds'):
return int(td.total_seconds())
ms = td.microseconds
secs = (td.seconds + td.days * 24 * 3600)
return int((ms + secs * 10**6) / 10**6)
class RedisCache(BaseCache):
def __init__(self, conn):
@ -27,7 +17,7 @@ class RedisCache(BaseCache):
self.conn.set(key, value)
else:
expires = expires - datetime.utcnow()
self.conn.setex(key, total_seconds(expires), value)
self.conn.setex(key, int(expires.total_seconds()), value)
def delete(self, key):
self.conn.delete(key)
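`timedelta.total_seconds()` exists on every Python version pip supports (2.7+), so the removed shim is redundant; an illustrative TTL computation:

from datetime import datetime, timedelta

expires = datetime.utcnow() + timedelta(hours=1)
ttl = int((expires - datetime.utcnow()).total_seconds())
assert 3590 < ttl <= 3600   # conn.setex(key, ttl, value) uses this TTL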

View File

@ -30,8 +30,10 @@ def parse_uri(uri):
class CacheController(object):
"""An interface to see if request should cached or not.
"""
def __init__(self, cache=None, cache_etags=True, serializer=None,
status_codes=None):
def __init__(
self, cache=None, cache_etags=True, serializer=None, status_codes=None
):
self.cache = cache or DictCache()
self.cache_etags = cache_etags
self.serializer = serializer or Serializer()
@ -64,34 +66,35 @@ class CacheController(object):
def parse_cache_control(self, headers):
known_directives = {
# https://tools.ietf.org/html/rfc7234#section-5.2
'max-age': (int, True,),
'max-stale': (int, False,),
'min-fresh': (int, True,),
'no-cache': (None, False,),
'no-store': (None, False,),
'no-transform': (None, False,),
'only-if-cached' : (None, False,),
'must-revalidate': (None, False,),
'public': (None, False,),
'private': (None, False,),
'proxy-revalidate': (None, False,),
's-maxage': (int, True,)
"max-age": (int, True),
"max-stale": (int, False),
"min-fresh": (int, True),
"no-cache": (None, False),
"no-store": (None, False),
"no-transform": (None, False),
"only-if-cached": (None, False),
"must-revalidate": (None, False),
"public": (None, False),
"private": (None, False),
"proxy-revalidate": (None, False),
"s-maxage": (int, True),
}
cc_headers = headers.get('cache-control',
headers.get('Cache-Control', ''))
cc_headers = headers.get("cache-control", headers.get("Cache-Control", ""))
retval = {}
for cc_directive in cc_headers.split(','):
parts = cc_directive.split('=', 1)
for cc_directive in cc_headers.split(","):
if not cc_directive.strip():
continue
parts = cc_directive.split("=", 1)
directive = parts[0].strip()
try:
typ, required = known_directives[directive]
except KeyError:
logger.debug('Ignoring unknown cache-control directive: %s',
directive)
logger.debug("Ignoring unknown cache-control directive: %s", directive)
continue
if not typ or not required:
@ -101,11 +104,16 @@ class CacheController(object):
retval[directive] = typ(parts[1].strip())
except IndexError:
if required:
logger.debug('Missing value for cache-control '
'directive: %s', directive)
logger.debug(
"Missing value for cache-control " "directive: %s",
directive,
)
except ValueError:
logger.debug('Invalid value for cache-control directive '
'%s, must be %s', directive, typ.__name__)
logger.debug(
"Invalid value for cache-control directive " "%s, must be %s",
directive,
typ.__name__,
)
return retval
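Roughly what the parser above produces for a typical header (illustrative values):

headers = {"cache-control": "max-age=3600, no-cache, max-stale, bogus"}
# parse_cache_control(headers) yields approximately:
expected = {"max-age": 3600, "no-cache": None, "max-stale": None}
# int() coerces 'max-age'; valueless directives map to None; 'bogus' is
# logged as unknown and skipped, as is the empty directive that a
# trailing comma would now produce thanks to the strip() guard.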
@ -119,24 +127,24 @@ class CacheController(object):
cc = self.parse_cache_control(request.headers)
# Bail out if the request insists on fresh data
if 'no-cache' in cc:
if "no-cache" in cc:
logger.debug('Request header has "no-cache", cache bypassed')
return False
if 'max-age' in cc and cc['max-age'] == 0:
if "max-age" in cc and cc["max-age"] == 0:
logger.debug('Request header has "max-age" as 0, cache bypassed')
return False
# Request allows serving from the cache, let's see if we find something
cache_data = self.cache.get(cache_url)
if cache_data is None:
logger.debug('No cache entry available')
logger.debug("No cache entry available")
return False
# Check whether it can be deserialized
resp = self.serializer.loads(request, cache_data)
if not resp:
logger.warning('Cache entry deserialization failed, entry ignored')
logger.warning("Cache entry deserialization failed, entry ignored")
return False
# If we have a cached 301, return it immediately. We don't
@ -148,27 +156,27 @@ class CacheController(object):
# Client can try to refresh the value by repeating the request
# with cache busting headers as usual (ie no-cache).
if resp.status == 301:
msg = ('Returning cached "301 Moved Permanently" response '
'(ignoring date and etag information)')
msg = (
'Returning cached "301 Moved Permanently" response '
"(ignoring date and etag information)"
)
logger.debug(msg)
return resp
headers = CaseInsensitiveDict(resp.headers)
if not headers or 'date' not in headers:
if 'etag' not in headers:
if not headers or "date" not in headers:
if "etag" not in headers:
# Without date or etag, the cached response can never be used
# and should be deleted.
logger.debug('Purging cached response: no date or etag')
logger.debug("Purging cached response: no date or etag")
self.cache.delete(cache_url)
logger.debug('Ignoring cached response: no date')
logger.debug("Ignoring cached response: no date")
return False
now = time.time()
date = calendar.timegm(
parsedate_tz(headers['date'])
)
date = calendar.timegm(parsedate_tz(headers["date"]))
current_age = max(0, now - date)
logger.debug('Current age based on date: %i', current_age)
logger.debug("Current age based on date: %i", current_age)
# TODO: There is an assumption that the result will be a
# urllib3 response object. This may not be best since we
@ -180,45 +188,41 @@ class CacheController(object):
freshness_lifetime = 0
# Check the max-age pragma in the cache control header
if 'max-age' in resp_cc:
freshness_lifetime = resp_cc['max-age']
logger.debug('Freshness lifetime from max-age: %i',
freshness_lifetime)
if "max-age" in resp_cc:
freshness_lifetime = resp_cc["max-age"]
logger.debug("Freshness lifetime from max-age: %i", freshness_lifetime)
# If there isn't a max-age, check for an expires header
elif 'expires' in headers:
expires = parsedate_tz(headers['expires'])
elif "expires" in headers:
expires = parsedate_tz(headers["expires"])
if expires is not None:
expire_time = calendar.timegm(expires) - date
freshness_lifetime = max(0, expire_time)
logger.debug("Freshness lifetime from expires: %i",
freshness_lifetime)
logger.debug("Freshness lifetime from expires: %i", freshness_lifetime)
# Determine if we are setting freshness limit in the
# request. Note, this overrides what was in the response.
if 'max-age' in cc:
freshness_lifetime = cc['max-age']
logger.debug('Freshness lifetime from request max-age: %i',
freshness_lifetime)
if "max-age" in cc:
freshness_lifetime = cc["max-age"]
logger.debug(
"Freshness lifetime from request max-age: %i", freshness_lifetime
)
if 'min-fresh' in cc:
min_fresh = cc['min-fresh']
if "min-fresh" in cc:
min_fresh = cc["min-fresh"]
# adjust our current age by our min fresh
current_age += min_fresh
logger.debug('Adjusted current age from min-fresh: %i',
current_age)
logger.debug("Adjusted current age from min-fresh: %i", current_age)
# Return entry if it is fresh enough
if freshness_lifetime > current_age:
logger.debug('The response is "fresh", returning cached response')
logger.debug('%i > %i', freshness_lifetime, current_age)
logger.debug("%i > %i", freshness_lifetime, current_age)
return resp
# we're not fresh. If we don't have an Etag, clear it out
if 'etag' not in headers:
logger.debug(
'The cached response is "stale" with no etag, purging'
)
if "etag" not in headers:
logger.debug('The cached response is "stale" with no etag, purging')
self.cache.delete(cache_url)
# return the original handler
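The freshness arithmetic in this hunk can be checked by hand; a worked example with made-up numbers (a response dated 100 seconds ago carrying max-age=3600):

import calendar
from email.utils import parsedate_tz

date = calendar.timegm(parsedate_tz('Tue, 24 Jul 2018 17:38:09 GMT'))
now = date + 100
current_age = max(0, now - date)          # 100
freshness_lifetime = 3600                 # from resp_cc["max-age"]
assert freshness_lifetime > current_age   # fresh: serve from cache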
@ -232,16 +236,15 @@ class CacheController(object):
if resp:
headers = CaseInsensitiveDict(resp.headers)
if 'etag' in headers:
new_headers['If-None-Match'] = headers['ETag']
if "etag" in headers:
new_headers["If-None-Match"] = headers["ETag"]
if 'last-modified' in headers:
new_headers['If-Modified-Since'] = headers['Last-Modified']
if "last-modified" in headers:
new_headers["If-Modified-Since"] = headers["Last-Modified"]
return new_headers
def cache_response(self, request, response, body=None,
status_codes=None):
def cache_response(self, request, response, body=None, status_codes=None):
"""
Algorithm for caching requests.
@ -252,9 +255,7 @@ class CacheController(object):
cacheable_status_codes = status_codes or self.cacheable_status_codes
if response.status not in cacheable_status_codes:
logger.debug(
'Status code %s not in %s',
response.status,
cacheable_status_codes
"Status code %s not in %s", response.status, cacheable_status_codes
)
return
@ -264,10 +265,12 @@ class CacheController(object):
# Content-Length is valid then we can check to see if the body we've
# been given matches the expected size, and if it doesn't we'll just
# skip trying to cache it.
if (body is not None and
"content-length" in response_headers and
response_headers["content-length"].isdigit() and
int(response_headers["content-length"]) != len(body)):
if (
body is not None
and "content-length" in response_headers
and response_headers["content-length"].isdigit()
and int(response_headers["content-length"]) != len(body)
):
return
cc_req = self.parse_cache_control(request.headers)
@ -278,53 +281,49 @@ class CacheController(object):
# Delete it from the cache if we happen to have it stored there
no_store = False
if 'no-store' in cc:
if "no-store" in cc:
no_store = True
logger.debug('Response header has "no-store"')
if 'no-store' in cc_req:
if "no-store" in cc_req:
no_store = True
logger.debug('Request header has "no-store"')
if no_store and self.cache.get(cache_url):
logger.debug('Purging existing cache entry to honor "no-store"')
self.cache.delete(cache_url)
if no_store:
return
# If we've been given an etag, then keep the response
if self.cache_etags and 'etag' in response_headers:
logger.debug('Caching due to etag')
if self.cache_etags and "etag" in response_headers:
logger.debug("Caching due to etag")
self.cache.set(
cache_url,
self.serializer.dumps(request, response, body=body),
cache_url, self.serializer.dumps(request, response, body=body)
)
# Add to the cache any 301s. We do this before looking that
# the Date headers.
elif response.status == 301:
logger.debug('Caching permanent redirect')
self.cache.set(
cache_url,
self.serializer.dumps(request, response)
)
logger.debug("Caching permanant redirect")
self.cache.set(cache_url, self.serializer.dumps(request, response))
# Add to the cache if the response headers demand it. If there
# is no date header then we can't do anything about expiring
# the cache.
elif 'date' in response_headers:
elif "date" in response_headers:
# cache when there is a max-age > 0
if 'max-age' in cc and cc['max-age'] > 0:
logger.debug('Caching b/c date exists and max-age > 0')
if "max-age" in cc and cc["max-age"] > 0:
logger.debug("Caching b/c date exists and max-age > 0")
self.cache.set(
cache_url,
self.serializer.dumps(request, response, body=body),
cache_url, self.serializer.dumps(request, response, body=body)
)
# If the request can expire, it means we should cache it
# in the meantime.
elif 'expires' in response_headers:
if response_headers['expires']:
logger.debug('Caching b/c of expires header')
elif "expires" in response_headers:
if response_headers["expires"]:
logger.debug("Caching b/c of expires header")
self.cache.set(
cache_url,
self.serializer.dumps(request, response, body=body),
cache_url, self.serializer.dumps(request, response, body=body)
)
def update_cached_response(self, request, response):
@ -336,10 +335,7 @@ class CacheController(object):
"""
cache_url = self.cache_url(request.url)
cached_response = self.serializer.loads(
request,
self.cache.get(cache_url)
)
cached_response = self.serializer.loads(request, self.cache.get(cache_url))
if not cached_response:
# we didn't have a cached response
@ -352,22 +348,20 @@ class CacheController(object):
# the cached body invalid. But... just in case, we'll be sure
to strip out ones we know might be problematic due to
# typical assumptions.
excluded_headers = [
"content-length",
]
excluded_headers = ["content-length"]
cached_response.headers.update(
dict((k, v) for k, v in response.headers.items()
if k.lower() not in excluded_headers)
dict(
(k, v)
for k, v in response.headers.items()
if k.lower() not in excluded_headers
)
)
# we want a 200 b/c we have content via the cache
cached_response.status = 200
# update our cache
self.cache.set(
cache_url,
self.serializer.dumps(request, cached_response),
)
self.cache.set(cache_url, self.serializer.dumps(request, cached_response))
return cached_response

View File

@ -27,17 +27,19 @@ class CallbackFileWrapper(object):
# self.__fp hasn't been set.
#
# [0] https://docs.python.org/2/reference/expressions.html#atom-identifiers
fp = self.__getattribute__('_CallbackFileWrapper__fp')
fp = self.__getattribute__("_CallbackFileWrapper__fp")
return getattr(fp, name)
def __is_fp_closed(self):
try:
return self.__fp.fp is None
except AttributeError:
pass
try:
return self.__fp.closed
except AttributeError:
pass
@ -66,7 +68,7 @@ class CallbackFileWrapper(object):
def _safe_read(self, amt):
data = self.__fp._safe_read(amt)
if amt == 2 and data == b'\r\n':
if amt == 2 and data == b"\r\n":
# urllib executes this read to toss the CRLF at the end
# of the chunk.
return data

Some files were not shown because too many files have changed in this diff