mirror of https://github.com/pypa/pip
Merge branch 'master' into refactor/introduce-models
This commit is contained in:
commit
8df8e53901
|
@ -6,5 +6,5 @@ require either a news file fragment or a marker to indicate they don't require
|
|||
one.
|
||||
|
||||
To read more about adding a news file fragment for your PR, please check out
|
||||
our documentation at: https://pip.pypa.io/en/latest/development/#adding-a-news-entry
|
||||
our documentation at: https://pip.pypa.io/en/latest/development/contributing/#adding-a-news-entry
|
||||
-->
|
||||
|
|
25
AUTHORS.txt
25
AUTHORS.txt
|
@ -1,16 +1,20 @@
|
|||
Adam Chainz <adam@adamj.eu>
|
||||
Adam Wentz <awentz@theonion.com>
|
||||
Adrien Morison <adrien.morison@gmail.com>
|
||||
Alan Yee <alyee@ucsd.edu>
|
||||
Aleks Bunin <github@compuix.com>
|
||||
Alex Gaynor <alex.gaynor@gmail.com>
|
||||
Alex Grönholm <alex.gronholm@nextday.fi>
|
||||
Alex Morega <alex@grep.ro>
|
||||
Alex Stachowiak <alexander@computer.org>
|
||||
Alexander Shtyrov <rawzausho@gmail.com>
|
||||
Alexandre Conrad <alexandre.conrad@gmail.com>
|
||||
Alli <alzeih@users.noreply.github.com>
|
||||
Anatoly Techtonik <techtonik@gmail.com>
|
||||
Andrei Geacar <andrei.geacar@gmail.com>
|
||||
Andrey Bulgakov <mail@andreiko.ru>
|
||||
Andrés Delfino <34587441+andresdelfino@users.noreply.github.com>
|
||||
Andrés Delfino <adelfino@gmail.com>
|
||||
Andy Freeland <andy.freeland@redjack.com>
|
||||
Andy Kluger <AndydeCleyre@users.noreply.github.com>
|
||||
Anish Tambe <anish.tambe@yahoo.in>
|
||||
|
@ -33,6 +37,7 @@ Atsushi Odagiri <aodagx@gmail.com>
|
|||
Avner Cohen <israbirding@gmail.com>
|
||||
Baptiste Mispelon <bmispelon@gmail.com>
|
||||
Bartek Ogryczak <b.ogryczak@gmail.com>
|
||||
Bastian Venthur <mail@venthur.de>
|
||||
Ben Darnell <ben@bendarnell.com>
|
||||
Ben Hoyt <benhoyt@gmail.com>
|
||||
Ben Rosser <rosser.bjr@gmail.com>
|
||||
|
@ -45,6 +50,7 @@ Bogdan Opanchuk <bogdan@opanchuk.net>
|
|||
Brad Erickson <eosrei@gmail.com>
|
||||
Bradley Ayers <bradley.ayers@gmail.com>
|
||||
Brandon L. Reiss <brandon@damyata.co>
|
||||
Brett Randall <javabrett@gmail.com>
|
||||
Brian Rosner <brosner@gmail.com>
|
||||
BrownTruck <BrownTruck@users.noreply.github.com>
|
||||
Bruno Oliveira <nicoddemus@gmail.com>
|
||||
|
@ -81,6 +87,7 @@ Craig Kerstiens <craig.kerstiens@gmail.com>
|
|||
Cristian Sorinel <cristian.sorinel@gmail.com>
|
||||
Curtis Doty <Curtis@GreenKey.net>
|
||||
Damian Quiroga <qdamian@gmail.com>
|
||||
Dan Black <dyspop@gmail.com>
|
||||
Dan Savilonis <djs@n-cube.org>
|
||||
Dan Sully <daniel-github@electricrain.com>
|
||||
daniel <mcdonaldd@unimelb.edu.au>
|
||||
|
@ -88,7 +95,9 @@ Daniel Collins <accounts@dac.io>
|
|||
Daniel Hahler <git@thequod.de>
|
||||
Daniel Holth <dholth@fastmail.fm>
|
||||
Daniel Jost <torpedojost@gmail.com>
|
||||
Daniel Shaulov <daniel.shaulov@gmail.com>
|
||||
Daniele Procida <daniele@vurt.org>
|
||||
Danny Hermes <daniel.j.hermes@gmail.com>
|
||||
Dav Clark <davclark@gmail.com>
|
||||
Dave Abrahams <dave@boostpro.com>
|
||||
David Aguilar <davvid@gmail.com>
|
||||
|
@ -106,10 +115,12 @@ Domen Kožar <domen@dev.si>
|
|||
Donald Stufft <donald@stufft.io>
|
||||
Dongweiming <dongweiming@admaster.com.cn>
|
||||
Douglas Thor <dougthor42@users.noreply.github.com>
|
||||
DrFeathers <WilliamGeorgeBurgess@gmail.com>
|
||||
Dustin Ingram <di@di.codes>
|
||||
Dwayne Bailey <dwayne@translate.org.za>
|
||||
Ed Morley <501702+edmorley@users.noreply.github.com>
|
||||
Ed Morley <emorley@mozilla.com>
|
||||
Eli Schwartz <eschwartz93@gmail.com>
|
||||
Emil Styrke <emil.styrke@gmail.com>
|
||||
Endoh Takanao <djmchl@gmail.com>
|
||||
enoch <lanxenet@gmail.com>
|
||||
|
@ -118,6 +129,8 @@ Eric Hanchrow <eric.hanchrow@gmail.com>
|
|||
Eric Hopper <hopper@omnifarious.org>
|
||||
Erik M. Bray <embray@stsci.edu>
|
||||
Erik Rose <erik@mozilla.com>
|
||||
Ernest W Durbin III <ewdurbin@gmail.com>
|
||||
Ernest W. Durbin III <ewdurbin@gmail.com>
|
||||
Erwin Janssen <erwinjanssen@outlook.com>
|
||||
Eugene Vereshchagin <evvers@gmail.com>
|
||||
fiber-space <fiber-space@users.noreply.github.com>
|
||||
|
@ -135,6 +148,7 @@ George Song <george@55minutes.com>
|
|||
Georgi Valkov <georgi.t.valkov@gmail.com>
|
||||
Giftlin Rajaiah <giftlin.rgn@gmail.com>
|
||||
gizmoguy1 <gizmoguy1@gmail.com>
|
||||
gkdoc <40815324+gkdoc@users.noreply.github.com>
|
||||
GOTO Hayato <3532528+gh640@users.noreply.github.com>
|
||||
Guilherme Espada <porcariadagata@gmail.com>
|
||||
Guy Rozendorn <guy@rzn.co.il>
|
||||
|
@ -171,6 +185,7 @@ Jay Graves <jay@skabber.com>
|
|||
Jeff Barber <jbarber@computer.org>
|
||||
Jeff Dairiki <dairiki@dairiki.org>
|
||||
Jeremy Stanley <fungi@yuggoth.org>
|
||||
Jeremy Zafran <jzafran@users.noreply.github.com>
|
||||
Jim Garrison <jim@garrison.cc>
|
||||
Jivan Amara <Development@JivanAmara.net>
|
||||
John-Scott Atlakson <john.scott.atlakson@gmail.com>
|
||||
|
@ -190,6 +205,7 @@ jwg4 <jack.grahl@yahoo.co.uk>
|
|||
Jyrki Pulliainen <jyrki@spotify.com>
|
||||
Kamal Bin Mustafa <kamal@smach.net>
|
||||
kaustav haldar <hi@kaustav.me>
|
||||
keanemind <keanemind@gmail.com>
|
||||
Kelsey Hightower <kelsey.hightower@gmail.com>
|
||||
Kenneth Belitzky <kenny@belitzky.com>
|
||||
Kenneth Reitz <me@kennethreitz.com>
|
||||
|
@ -197,6 +213,7 @@ Kenneth Reitz <me@kennethreitz.org>
|
|||
Kevin Burke <kev@inburke.com>
|
||||
Kevin Carter <kevin.carter@rackspace.com>
|
||||
Kevin Frommelt <kevin.frommelt@webfilings.com>
|
||||
Kexuan Sun <me@kianasun.com>
|
||||
Kit Randel <kit@nocturne.net.nz>
|
||||
kpinc <kop@meme.com>
|
||||
Kumar McMillan <kumar.mcmillan@gmail.com>
|
||||
|
@ -246,16 +263,19 @@ Miguel Araujo Perez <miguel.araujo.perez@gmail.com>
|
|||
Mihir Singh <git.service@mihirsingh.com>
|
||||
Min RK <benjaminrk@gmail.com>
|
||||
MinRK <benjaminrk@gmail.com>
|
||||
Miro Hrončok <miro@hroncok.cz>
|
||||
montefra <franz.bergesund@gmail.com>
|
||||
Monty Taylor <mordred@inaugust.com>
|
||||
Nate Coraor <nate@bx.psu.edu>
|
||||
Nathaniel J. Smith <njs@pobox.com>
|
||||
Nehal J Wani <nehaljw.kkd1@gmail.com>
|
||||
Nick Coghlan <ncoghlan@gmail.com>
|
||||
Nick Stenning <nick@whiteink.com>
|
||||
Nikhil Benesch <nikhil.benesch@gmail.com>
|
||||
Nowell Strite <nowell@strite.org>
|
||||
nvdv <modestdev@gmail.com>
|
||||
Ofekmeister <ofekmeister@gmail.com>
|
||||
Oliver Jeeves <oliver.jeeves@ocado.com>
|
||||
Oliver Tonnhofer <olt@bogosoft.com>
|
||||
Olivier Girardot <ssaboum@gmail.com>
|
||||
Olivier Grisel <olivier.grisel@ensta.org>
|
||||
|
@ -281,6 +301,7 @@ Phaneendra Chiruvella <hi@pcx.io>
|
|||
Phil Freo <phil@philfreo.com>
|
||||
Phil Pennock <phil@pennock-tech.com>
|
||||
Phil Whelan <phil123@gmail.com>
|
||||
Philip Molloy <pamolloy@users.noreply.github.com>
|
||||
Philippe Ombredanne <pombredanne@gmail.com>
|
||||
Pi Delport <pjdelport@gmail.com>
|
||||
Pierre-Yves Rofes <github@rofes.fr>
|
||||
|
@ -323,6 +344,7 @@ Sebastian Schaetz <sschaetz@butterflynetinc.com>
|
|||
Segev Finer <segev208@gmail.com>
|
||||
Sergey Vasilyev <nolar@nolar.info>
|
||||
Seth Woodworth <seth@sethish.com>
|
||||
Shlomi Fish <shlomif@shlomifish.org>
|
||||
Simeon Visser <svisser@users.noreply.github.com>
|
||||
Simon Cross <hodgestar@gmail.com>
|
||||
Simon Pichugin <simon.pichugin@gmail.com>
|
||||
|
@ -340,6 +362,7 @@ Stéphane Bidoul (ACSONE) <stephane.bidoul@acsone.eu>
|
|||
Stéphane Bidoul <stephane.bidoul@acsone.eu>
|
||||
Stéphane Klein <contact@stephane-klein.info>
|
||||
Takayuki SHIMIZUKAWA <shimizukawa@gmail.com>
|
||||
Thijs Triemstra <info@collab.nl>
|
||||
Thomas Fenzl <thomas.fenzl@gmail.com>
|
||||
Thomas Grainger <tagrain@gmail.com>
|
||||
Thomas Guettler <tguettler@tbz-pariv.de>
|
||||
|
@ -351,6 +374,7 @@ Tim Harder <radhermit@gmail.com>
|
|||
Tim Heap <tim@timheap.me>
|
||||
tim smith <github@tim-smith.us>
|
||||
tinruufu <tinruufu@gmail.com>
|
||||
Tom Freudenheim <tom.freudenheim@onepeloton.com>
|
||||
Tom V <tom@viner.tv>
|
||||
Tomer Chachamu <tomer.chachamu@gmail.com>
|
||||
Tony Zhaocheng Tan <tony@tonytan.io>
|
||||
|
@ -380,4 +404,5 @@ Zearin <zearin@gonk.net>
|
|||
Zearin <Zearin@users.noreply.github.com>
|
||||
Zhiping Deng <kofreestyler@gmail.com>
|
||||
Zvezdan Petkovic <zpetkovic@acm.org>
|
||||
Łukasz Langa <lukasz@langa.pl>
|
||||
Семён Марьясин <simeon@maryasin.name>
|
||||
|
|
77
NEWS.rst
77
NEWS.rst
|
@ -7,6 +7,83 @@
|
|||
|
||||
.. towncrier release notes start
|
||||
|
||||
18.0 (2018-07-22)
|
||||
=================
|
||||
|
||||
Process
|
||||
-------
|
||||
|
||||
- Switch to a Calendar based versioning scheme.
|
||||
- Formally document our deprecation process as a minimum of 6 months of deprecation
|
||||
warnings.
|
||||
- Adopt and document NEWS fragment writing style.
|
||||
- Switch to releasing a new, non bug fix version of pip every 3 months.
|
||||
|
||||
Deprecations and Removals
|
||||
-------------------------
|
||||
|
||||
- Remove the legacy format from pip list. (#3651, #3654)
|
||||
- Dropped support for Python 3.3. (#3796)
|
||||
- Remove support for cleaning up #egg fragment postfixes. (#4174)
|
||||
- Remove the shim for the old get-pip.py location. (#5520)
|
||||
|
||||
For the past 2 years, it's only been redirecting users to use the newer
|
||||
https://bootstrap.pypa.io/get-pip.py location.
|
||||
|
||||
Features
|
||||
--------
|
||||
|
||||
- Introduce a new --prefer-binary flag, to prefer older wheels over newer source packages. (#3785)
|
||||
- Improve autocompletion function on file name completion after options
|
||||
which have ``<file>``, ``<dir>`` or ``<path>`` as metavar. (#4842, #5125)
|
||||
- Add support for installing PEP 518 build dependencies from source. (#5229)
|
||||
- Improve status message when upgrade is skipped due to only-if-needed strategy. (#5319)
|
||||
|
||||
Bug Fixes
|
||||
---------
|
||||
|
||||
- Update pip's self-check logic to not use a virtualenv specific file and honor cache-dir. (#3905)
|
||||
- Remove compiled pyo files for wheel packages. (#4471)
|
||||
- Speed up printing of newly installed package versions. (#5127)
|
||||
- Restrict install time dependency warnings to directly-dependant packages. (#5196, #5457)
|
||||
|
||||
Warning about the entire package set has resulted in users getting confused as
|
||||
to why pip is printing these warnings.
|
||||
- Improve handling of PEP 518 build requirements: support environment markers and extras. (#5230, #5265)
|
||||
- Remove username/password from log message when using index with basic auth. (#5249)
|
||||
- Remove trailing os.sep from PATH directories to avoid false negatives. (#5293)
|
||||
- Fix "pip wheel pip" being blocked by the "don't use pip to modify itself" check. (#5311, #5312)
|
||||
- Disable pip's version check (and upgrade message) when installed by a different package manager. (#5346)
|
||||
|
||||
This works better with Linux distributions where pip's upgrade message may
|
||||
result in users running pip in a manner that modifies files that should be
|
||||
managed by the OS's package manager.
|
||||
- Check for file existence and unlink first when clobbering existing files during a wheel install. (#5366)
|
||||
- Improve error message to be more specific when no files are found as listed in as listed in PKG-INFO. (#5381)
|
||||
- Always read ``pyproject.toml`` as UTF-8. This fixes Unicode handling on Windows and Python 2. (#5482)
|
||||
- Fix a crash that occurs when PATH not set, while generating script location warning. (#5558)
|
||||
- Disallow packages with ``pyproject.toml`` files that have an empty build-system table. (#5627)
|
||||
|
||||
Vendored Libraries
|
||||
------------------
|
||||
|
||||
- Update CacheControl to 0.12.5.
|
||||
- Update certifi to 2018.4.16.
|
||||
- Update distro to 1.3.0.
|
||||
- Update idna to 2.7.
|
||||
- Update ipaddress to 1.0.22.
|
||||
- Update pkg_resources to 39.2.0 (via setuptools).
|
||||
- Update progress to 1.4.
|
||||
- Update pytoml to 0.1.16.
|
||||
- Update requests to 2.19.1.
|
||||
- Update urllib3 to 1.23.
|
||||
|
||||
Improved Documentation
|
||||
----------------------
|
||||
|
||||
- Document how to use pip with a proxy server. (#512, #5574)
|
||||
- Document that the output of pip show is in RFC-compliant mail header format. (#5261)
|
||||
|
||||
|
||||
10.0.1 (2018-04-19)
|
||||
===================
|
||||
|
|
41
appveyor.yml
41
appveyor.yml
|
@ -28,10 +28,37 @@ cache:
|
|||
- '%LOCALAPPDATA%\pip\Cache'
|
||||
|
||||
test_script:
|
||||
# Shorten paths, workaround https://bugs.python.org/issue18199
|
||||
- "subst T: %TEMP%"
|
||||
- "set TEMP=T:\\"
|
||||
- "set TMP=T:\\"
|
||||
- "tox -e py -- -m unit -n 3"
|
||||
- "if \"%RUN_INTEGRATION_TESTS%\" == \"True\" (
|
||||
tox -e py -- -m integration -n 3 --duration=5 )"
|
||||
- ps: |
|
||||
function should_run_tests {
|
||||
if ("$env:APPVEYOR_PULL_REQUEST_NUMBER" -eq "") {
|
||||
Write-Host "Not a pull request - running tests"
|
||||
return $true
|
||||
}
|
||||
Write-Host "Pull request $env:APPVEYOR_PULL_REQUEST_NUMBER based on branch $env:APPVEYOR_REPO_BRANCH"
|
||||
git fetch -q origin +refs/heads/$env:APPVEYOR_REPO_BRANCH
|
||||
$changes = (git diff --name-only HEAD (git merge-base HEAD FETCH_HEAD))
|
||||
Write-Host "Files changed:"
|
||||
Write-Host $changes
|
||||
$important = $changes | Where-Object { $_ -NotLike "*.rst" } |
|
||||
Where-Object { $_ -NotLike "docs*" } |
|
||||
Where-Object { $_ -NotLike "news*" } |
|
||||
Where-Object { $_ -NotLike ".github*" }
|
||||
if (!$important) {
|
||||
Write-Host "Only documentation changes - skipping tests"
|
||||
return $false
|
||||
}
|
||||
|
||||
Write-Host "Pull request $env:APPVEYOR_PULL_REQUEST_NUMBER alters code - running tests"
|
||||
return $true
|
||||
}
|
||||
|
||||
if (should_run_tests) {
|
||||
# Shorten paths, workaround https://bugs.python.org/issue18199
|
||||
subst T: $env:TEMP
|
||||
$env:TEMP = "T:\"
|
||||
$env:TMP = "T:\"
|
||||
tox -e py -- -m unit -n 3
|
||||
if ($env:RUN_INTEGRATION_TESTS -eq "True") {
|
||||
tox -e py -- -m integration -n 3 --duration=5
|
||||
}
|
||||
}
|
||||
|
|
|
@ -11,10 +11,10 @@
|
|||
# All configuration values have a default; values that are commented out
|
||||
# serve to show the default.
|
||||
|
||||
import glob
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import glob
|
||||
|
||||
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
|
||||
|
||||
|
@ -127,6 +127,7 @@ pygments_style = 'sphinx'
|
|||
extlinks = {
|
||||
'issue': ('https://github.com/pypa/pip/issues/%s', '#'),
|
||||
'pull': ('https://github.com/pypa/pip/pull/%s', 'PR #'),
|
||||
'pypi': ('https://pypi.org/project/%s', ''),
|
||||
}
|
||||
|
||||
# -- Options for HTML output --------------------------------------------------
|
||||
|
|
|
@ -1,265 +0,0 @@
|
|||
===========
|
||||
Development
|
||||
===========
|
||||
|
||||
Pull Requests
|
||||
=============
|
||||
|
||||
- Submit Pull Requests against the `master` branch.
|
||||
- Provide a good description of what you're doing and why.
|
||||
- Provide tests that cover your changes and try to run the tests locally first.
|
||||
|
||||
**Example**. Assuming you set up GitHub account, forked pip repository from
|
||||
https://github.com/pypa/pip to your own page via web interface, and your
|
||||
fork is located at https://github.com/yourname/pip
|
||||
|
||||
::
|
||||
|
||||
$ git clone git@github.com:pypa/pip.git
|
||||
$ cd pip
|
||||
# ...
|
||||
$ git diff
|
||||
$ git add <modified> ...
|
||||
$ git status
|
||||
$ git commit
|
||||
|
||||
You may reference relevant issues in commit messages (like #1259) to
|
||||
make GitHub link issues and commits together, and with phrase like
|
||||
"fixes #1259" you can even close relevant issues automatically. Now
|
||||
push the changes to your fork::
|
||||
|
||||
$ git push git@github.com:yourname/pip.git
|
||||
|
||||
Open Pull Requests page at https://github.com/yourname/pip/pulls and
|
||||
click "New pull request" and select your fork. That's it.
|
||||
|
||||
Pull requests should be self-contained, and limited in scope. Before being
|
||||
merged, a pull request must be reviewed, and keeping individual PRs limited
|
||||
in scope makes this far easier. In particular, pull requests must not be
|
||||
treated as "feature branches", with ongoing development work happening
|
||||
within the PR. Instead, the feature should be broken up into smaller,
|
||||
independent parts which can be reviewed and merged individually.
|
||||
|
||||
When creating a pull request, avoid including "cosmetic" changes to
|
||||
code that is unrelated to your change, as these make reviewing the PR
|
||||
more difficult. Examples include re-flowing text in comments or
|
||||
documentation, or addition or removal of blank lines or whitespace
|
||||
within lines. Such changes can be made separately, as a "formatting
|
||||
cleanup" PR, if needed.
|
||||
|
||||
|
||||
Automated Testing
|
||||
=================
|
||||
|
||||
All pull requests and merges to 'master' branch are tested using `Travis CI`_
|
||||
and `Appveyor CI`_ based on our `.travis.yml`_ and `appveyor.yml`_ files.
|
||||
|
||||
You can find the status and results to the CI runs for your PR on GitHub's Web
|
||||
UI for the pull request. You can also find links to the CI services' pages for
|
||||
the specific builds in the form of "Details" links, in case the CI run fails
|
||||
and you wish to view the output.
|
||||
|
||||
To trigger CI to run again for a pull request, you can close and open the pull
|
||||
request or submit another change to the pull request. If needed, project
|
||||
maintainers can manually trigger a restart of a job/build.
|
||||
|
||||
Running tests
|
||||
=============
|
||||
|
||||
OS Requirements: subversion, bazaar, git, and mercurial.
|
||||
|
||||
Python Requirements: tox or install all packages listed in
|
||||
`tools/test-requirements.txt`_
|
||||
|
||||
Ways to run the tests locally::
|
||||
|
||||
$ tox -e py36 # The preferred way to run the tests, can use pyNN to
|
||||
# run for a particular version or leave off the -e to
|
||||
# run for all versions.
|
||||
$ python setup.py test # Using the setuptools test plugin
|
||||
$ py.test # Using py.test directly
|
||||
$ tox # Using tox against pip's tox.ini
|
||||
|
||||
If you are missing one of the VCS tools, you can tell ``py.test`` to skip it::
|
||||
|
||||
# When using tox
|
||||
$ tox -e py36 -- -k 'not svn'
|
||||
$ tox -e py36 -- -k 'not (svn or git)'
|
||||
# Directly with py.test
|
||||
$ py.test -k 'not svn'
|
||||
$ py.test -k 'not (svn or git)'
|
||||
|
||||
|
||||
Getting Involved
|
||||
================
|
||||
|
||||
The pip project welcomes help in the following ways:
|
||||
|
||||
- Making Pull Requests for code, tests, or docs.
|
||||
- Commenting on open issues and pull requests.
|
||||
- Helping to answer questions on the `mailing list`_.
|
||||
|
||||
If you want to become an official maintainer, start by helping out.
|
||||
|
||||
Later, when you think you're ready, get in touch with one of the maintainers,
|
||||
and they will initiate a vote.
|
||||
|
||||
|
||||
Adding a NEWS Entry
|
||||
===================
|
||||
|
||||
The ``NEWS.rst`` file is managed using `towncrier`_ and all non trivial changes
|
||||
must be accompanied by a news entry.
|
||||
|
||||
To add an entry to the news file, first you need to have created an issue
|
||||
describing the change you want to make. A Pull Request itself *may* function as
|
||||
such, but it is preferred to have a dedicated issue (for example, in case the
|
||||
PR ends up rejected due to code quality reasons).
|
||||
|
||||
Once you have an issue or pull request, you take the number and you create a
|
||||
file inside of the ``news/`` directory named after that issue number with an
|
||||
extension of ``removal``, ``feature``, ``bugfix``, or ``doc``. Thus if your
|
||||
issue or PR number is ``1234`` and this change is fixing a bug, then you would
|
||||
create a file ``news/1234.bugfix``. PRs can span multiple categories by creating
|
||||
multiple files (for instance, if you added a feature and deprecated/removed the
|
||||
old feature at the same time, you would create ``news/NNNN.feature`` and
|
||||
``news/NNNN.removal``). Likewise if a PR touches multiple issues/PRs you may
|
||||
create a file for each of them with the exact same contents and Towncrier will
|
||||
deduplicate them.
|
||||
|
||||
The contents of this file are reStructuredText formatted text that will be used
|
||||
as the content of the news file entry. You do not need to reference the issue
|
||||
or PR numbers here as towncrier will automatically add a reference to all of
|
||||
the affected issues when rendering the news file.
|
||||
|
||||
In order to maintain a consistent style in the ``NEWS.rst`` file, it is
|
||||
preferred to keep the news entry to the point, in sentence case, shorter than
|
||||
80 characters and in an imperative tone -- an entry should complete the sentence
|
||||
"This change will ...". In rare cases, where one line is not enough, use a
|
||||
summary line in an imperative tone followed by a blank line separating it
|
||||
from a description of the feature/change in one or more paragraphs, each wrapped
|
||||
at 80 characters. Remember that a news entry is meant for end users and should
|
||||
only contain details relevant to an end user.
|
||||
|
||||
A trivial change is anything that does not warrant an entry in the news file.
|
||||
Some examples are: Code refactors that don't change anything as far as the
|
||||
public is concerned, typo fixes, white space modification, etc. To mark a PR
|
||||
as trivial a contributor simply needs to add a randomly named, empty file to
|
||||
the ``news/`` directory with the extension of ``.trivial``. If you are on a
|
||||
POSIX like operating system, one can be added by running
|
||||
``touch news/$(uuidgen).trivial``. On Windows, the same result can be achieved
|
||||
in Powershell using ``New-Item "news/$([guid]::NewGuid()).trivial"``. Core
|
||||
committers may also add a "trivial" label to the PR which will accomplish the
|
||||
same thing.
|
||||
|
||||
Upgrading, removing, or adding a new vendored library gets a special mention
|
||||
using a ``news/<library>.vendor`` file. This is in addition to any features,
|
||||
bugfixes, or other kinds of news that pulling in this library may have. This
|
||||
uses the library name as the key so that updating the same library twice doesn't
|
||||
produce two news file entries.
|
||||
|
||||
Changes to the processes, policies, or other non code related changed that are
|
||||
otherwise notable can be done using a ``news/<name>.process`` file. This is not
|
||||
typically used, but can be used for things like changing version schemes,
|
||||
updating deprecation policy, etc.
|
||||
|
||||
|
||||
Release Cadence
|
||||
===============
|
||||
|
||||
The pip project has a release cadence of releasing whatever is on ``master``
|
||||
every 3 months. This gives users a predictable pattern for when releases
|
||||
are going to happen and prevents locking up improvements for fixes for long
|
||||
periods of time, while still preventing massively fracturing the user base
|
||||
with version numbers.
|
||||
|
||||
Our release months are January, April, July, October. The release date within
|
||||
that month will be up to the release manager for that release. If there are
|
||||
no changes, then that release month is skipped and the next release will be
|
||||
3 month later.
|
||||
|
||||
The release manager may, at their discretion, choose whether or not there
|
||||
will be a pre-release period for a release, and if there is may extend that
|
||||
period into the next month if needed.
|
||||
|
||||
Because releases are made direct from the ``master`` branch, it is essential
|
||||
that ``master`` is always in a releasable state. It is acceptable to merge
|
||||
PRs that partially implement a new feature, but only if the partially
|
||||
implemented version is usable in that state (for example, with reduced
|
||||
functionality or disabled by default). In the case where a merged PR is found
|
||||
to need extra work before being released, the release manager always has the
|
||||
option to back out the partial change prior to a release. The PR can then be
|
||||
reworked and resubmitted for the next release.
|
||||
|
||||
|
||||
Deprecation Policy
|
||||
==================
|
||||
|
||||
Any change to pip that removes or significantly alters user-visible behaviour
|
||||
that is described in the pip documentation will be deprecated for a minimum of
|
||||
one released version before the change occurs. Deprecation will take the form of
|
||||
a warning being issued by pip when the feature is used. Longer deprecation
|
||||
periods, or deprecation warnings for behaviour changes that would not normally
|
||||
be covered by this policy, are also possible depending on circumstances, but
|
||||
this is at the discretion of the pip developers.
|
||||
|
||||
Note that the documentation is the sole reference for what counts as agreed
|
||||
behaviour. If something isn't explicitly mentioned in the documentation, it can
|
||||
be changed without warning, or any deprecation period, in a pip release.
|
||||
However, we are aware that the documentation isn't always complete - PRs that
|
||||
document existing behaviour with the intention of covering that behaviour with
|
||||
the above deprecation process are always acceptable, and will be considered on
|
||||
their merits.
|
||||
|
||||
|
||||
Release Process
|
||||
===============
|
||||
|
||||
#. On the current pip ``master`` branch, generate a new ``AUTHORS.txt`` by
|
||||
running ``invoke generate.authors`` and commit the results.
|
||||
#. On the current pip ``master`` branch, make a new commit which bumps the
|
||||
version in ``pip/__init__.py`` to the release version and adjust the
|
||||
``CHANGES.txt`` file to reflect the current date. The release version should
|
||||
follow a YY.N scheme, where YY is the two digit year, and N is the Nth
|
||||
release within that year.
|
||||
#. On the current pip ``master`` branch, generate a new ``NEWS.rst`` by running
|
||||
``invoke generate.news`` and commit the results.
|
||||
#. Create a signed tag of the ``master`` branch of the form ``X.Y.Z`` using the
|
||||
command ``git tag -s X.Y.Z``.
|
||||
#. Checkout the tag using ``git checkout X.Y.Z`` and create the distribution
|
||||
files using ``python setup.py sdist bdist_wheel``.
|
||||
#. Upload the distribution files to PyPI using twine
|
||||
(``twine upload -s dist/*``). The upload should include GPG signatures of
|
||||
the distribution files.
|
||||
#. Push all of the changes.
|
||||
#. Regenerate the ``get-pip.py`` script by running
|
||||
``invoke generate.installer`` in the get-pip repository, and committing the
|
||||
results.
|
||||
|
||||
|
||||
Creating a Bugfix Release
|
||||
=========================
|
||||
|
||||
Sometimes we need to release a bugfix release of the form ``X.Y.Z+1``. In order
|
||||
to create one of these the changes should already be merged into the
|
||||
``master`` branch.
|
||||
|
||||
#. Create a new ``release/YY.N.Z+1`` branch off of the ``YY.N`` tag using the
|
||||
command ``git checkout -b release/YY.N.Z+1 YY.N``.
|
||||
#. Cherry pick the fixed commits off of the ``master`` branch, fixing any
|
||||
conflicts and moving any changelog entries from the development version's
|
||||
changelog section to the ``YY.N.Z+1`` section.
|
||||
#. Push the ``release/YY.N.Z+1`` branch to github and submit a PR for it against
|
||||
the ``master`` branch and wait for the tests to run.
|
||||
#. Once tests run, merge the ``release/YY.N.Z+1`` branch into master, and follow
|
||||
the above release process starting with step 4.
|
||||
|
||||
|
||||
.. _`mailing list`: https://mail.python.org/mailman/listinfo/distutils-sig
|
||||
.. _`towncrier`: https://pypi.org/project/towncrier/
|
||||
.. _`Travis CI`: https://travis-ci.org/
|
||||
.. _`Appveyor CI`: https://www.appveyor.com/
|
||||
.. _`.travis.yml`: https://github.com/pypa/pip/blob/master/.travis.yml
|
||||
.. _`appveyor.yml`: https://github.com/pypa/pip/blob/master/appveyor.yml
|
||||
.. _`Travis CI Pull Requests`: https://travis-ci.org/pypa/pip/pull_requests
|
||||
.. _`tools/test-requirements.txt`: https://github.com/pypa/pip/blob/master/tools/test-requirements.txt
|
|
@ -0,0 +1,238 @@
|
|||
============
|
||||
Contributing
|
||||
============
|
||||
|
||||
.. todo
|
||||
Create a "guide" to pip's internals and link to it from here saying
|
||||
"you might want to take a look at the guide"
|
||||
|
||||
|
||||
Submitting Pull Requests
|
||||
========================
|
||||
|
||||
Submit pull requests against the ``master`` branch, providing a good
|
||||
description of what you're doing and why. You must have legal permission to
|
||||
distribute any code you contribute to pip and it must be available under the
|
||||
MIT License.
|
||||
|
||||
Provide tests that cover your changes and run the tests locally first. pip
|
||||
:ref:`supports <compatibility-requirements>` multiple Python versions and
|
||||
operating systems. Any pull request must consider and work on all these
|
||||
platforms.
|
||||
|
||||
Pull Requests should be small to facilitate easier review. Keep them
|
||||
self-contained, and limited in scope. `Studies have shown`_ that review quality
|
||||
falls off as patch size grows. Sometimes this will result in many small PRs to
|
||||
land a single large feature. In particular, pull requests must not be treated
|
||||
as "feature branches", with ongoing development work happening within the PR.
|
||||
Instead, the feature should be broken up into smaller, independent parts which
|
||||
can be reviewed and merged individually.
|
||||
|
||||
Additionally, avoid including "cosmetic" changes to code that
|
||||
is unrelated to your change, as these make reviewing the PR more difficult.
|
||||
Examples include re-flowing text in comments or documentation, or addition or
|
||||
removal of blank lines or whitespace within lines. Such changes can be made
|
||||
separately, as a "formatting cleanup" PR, if needed.
|
||||
|
||||
Automated Testing
|
||||
=================
|
||||
|
||||
All pull requests and merges to 'master' branch are tested using `Travis CI`_
|
||||
and `Appveyor CI`_ based on our `.travis.yml`_ and `appveyor.yml`_ files.
|
||||
|
||||
You can find the status and results to the CI runs for your PR on GitHub's Web
|
||||
UI for the pull request. You can also find links to the CI services' pages for
|
||||
the specific builds in the form of "Details" links, in case the CI run fails
|
||||
and you wish to view the output.
|
||||
|
||||
To trigger CI to run again for a pull request, you can close and open the pull
|
||||
request or submit another change to the pull request. If needed, project
|
||||
maintainers can manually trigger a restart of a job/build.
|
||||
|
||||
|
||||
NEWS Entries
|
||||
============
|
||||
|
||||
The ``NEWS.rst`` file is managed using `towncrier`_ and all non trivial changes
|
||||
must be accompanied by a news entry.
|
||||
|
||||
To add an entry to the news file, first you need to have created an issue
|
||||
describing the change you want to make. A Pull Request itself *may* function as
|
||||
such, but it is preferred to have a dedicated issue (for example, in case the
|
||||
PR ends up rejected due to code quality reasons).
|
||||
|
||||
Once you have an issue or pull request, you take the number and you create a
|
||||
file inside of the ``news/`` directory named after that issue number with an
|
||||
extension of ``removal``, ``feature``, ``bugfix``, or ``doc``. Thus if your
|
||||
issue or PR number is ``1234`` and this change is fixing a bug, then you would
|
||||
create a file ``news/1234.bugfix``. PRs can span multiple categories by creating
|
||||
multiple files (for instance, if you added a feature and deprecated/removed the
|
||||
old feature at the same time, you would create ``news/NNNN.feature`` and
|
||||
``news/NNNN.removal``). Likewise if a PR touches multiple issues/PRs you may
|
||||
create a file for each of them with the exact same contents and Towncrier will
|
||||
deduplicate them.
|
||||
|
||||
Contents of a NEWS entry
|
||||
------------------------
|
||||
|
||||
The contents of this file are reStructuredText formatted text that will be used
|
||||
as the content of the news file entry. You do not need to reference the issue
|
||||
or PR numbers here as towncrier will automatically add a reference to all of
|
||||
the affected issues when rendering the news file.
|
||||
|
||||
In order to maintain a consistent style in the ``NEWS.rst`` file, it is
|
||||
preferred to keep the news entry to the point, in sentence case, shorter than
|
||||
80 characters and in an imperative tone -- an entry should complete the sentence
|
||||
"This change will ...". In rare cases, where one line is not enough, use a
|
||||
summary line in an imperative tone followed by a blank line separating it
|
||||
from a description of the feature/change in one or more paragraphs, each wrapped
|
||||
at 80 characters. Remember that a news entry is meant for end users and should
|
||||
only contain details relevant to an end user.
|
||||
|
||||
Choosing the type of NEWS entry
|
||||
-------------------------------
|
||||
|
||||
A trivial change is anything that does not warrant an entry in the news file.
|
||||
Some examples are: Code refactors that don't change anything as far as the
|
||||
public is concerned, typo fixes, white space modification, etc. To mark a PR
|
||||
as trivial a contributor simply needs to add a randomly named, empty file to
|
||||
the ``news/`` directory with the extension of ``.trivial``. If you are on a
|
||||
POSIX like operating system, one can be added by running
|
||||
``touch news/$(uuidgen).trivial``. On Windows, the same result can be achieved
|
||||
in Powershell using ``New-Item "news/$([guid]::NewGuid()).trivial"``. Core
|
||||
committers may also add a "trivial" label to the PR which will accomplish the
|
||||
same thing.
|
||||
|
||||
Upgrading, removing, or adding a new vendored library gets a special mention
|
||||
using a ``news/<library>.vendor`` file. This is in addition to any features,
|
||||
bugfixes, or other kinds of news that pulling in this library may have. This
|
||||
uses the library name as the key so that updating the same library twice doesn't
|
||||
produce two news file entries.
|
||||
|
||||
Changes to the processes, policies, or other non code related changed that are
|
||||
otherwise notable can be done using a ``news/<name>.process`` file. This is not
|
||||
typically used, but can be used for things like changing version schemes,
|
||||
updating deprecation policy, etc.
|
||||
|
||||
|
||||
Updating your branch
|
||||
====================
|
||||
|
||||
As you work, you might need to keep your local ``master`` branch up-to-date with
|
||||
the ``master`` branch in the main pip repository, which moves forward as the
|
||||
maintainers merge pull requests. Most people working on the project use the
|
||||
following workflow.
|
||||
|
||||
This assumes that you have Git configured so that when you run the following
|
||||
command:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
git remote -v
|
||||
|
||||
Your output looks like this:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
origin https://github.com/USERNAME/pip.git (fetch)
|
||||
origin https://github.com/USERNAME/pip.git (push)
|
||||
upstream https://github.com/pypa/pip.git (fetch)
|
||||
upstream https://github.com/pypa/pip.git (push)
|
||||
|
||||
In the example above, ``USERNAME`` is your username on GitHub.
|
||||
|
||||
First, fetch the latest changes from the main pip repository, ``upstream``:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
git fetch upstream
|
||||
|
||||
Then, check out your local ``master`` branch, and rebase the changes on top of
|
||||
it:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
git checkout master
|
||||
git rebase upstream/master
|
||||
|
||||
At this point, you might have to `resolve merge conflicts`_. Once this is done,
|
||||
push the updates you have just made to your local ``master`` branch to your
|
||||
``origin`` repository on GitHub:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
git checkout master
|
||||
git push origin master
|
||||
|
||||
Now your local ``master`` branch and the ``master`` branch in your ``origin``
|
||||
repo have been updated with the most recent changes from the main pip
|
||||
repository.
|
||||
|
||||
To keep your branches updated, the process is similar:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
git checkout awesome-feature
|
||||
git fetch upstream
|
||||
git rebase upstream/master
|
||||
|
||||
Now your branch has been updated with the latest changes from the
|
||||
``master`` branch on the upstream pip repository.
|
||||
|
||||
It's good practice to back up your branches by pushing them to your
|
||||
``origin`` on GitHub as you are working on them. To push a branch,
|
||||
run this command:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
git push origin awesome-feature
|
||||
|
||||
In this example, ``awesome-feature`` is the name of your branch. This
|
||||
will push the branch you are working on to GitHub, but will not
|
||||
create a PR.
|
||||
|
||||
Once you have pushed your branch to your ``origin``, if you need to
|
||||
update it again, you will have to force push your changes by running the
|
||||
following command:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
git push -f origin awesome-feature
|
||||
|
||||
The ``-f`` (or ``--force``) flag after ``push`` forces updates from your local
|
||||
branch to update your ``origin`` branch. If you have a PR open on your
|
||||
branch, force pushing will update your PR. (This is a useful command
|
||||
when someone requests changes on a PR.)
|
||||
|
||||
If you get an error message like this:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
! [rejected] awesome-feature -> awesome-feature (non-fast-forward)
|
||||
error: failed to push some refs to 'https://github.com/USERNAME/pip.git'
|
||||
hint: Updates were rejected because the tip of your current branch is behind
|
||||
hint: its remote counterpart. Integrate the remote changes (e.g.
|
||||
hint: 'git pull ...') before pushing again.
|
||||
hint: See the 'Note about fast-forwards' in 'git push --help' for details.
|
||||
|
||||
Try force-pushing your branch with ``push -f``.
|
||||
|
||||
The ``master`` branch in the main pip repository gets updated frequently, so
|
||||
you might have to update your branch at least once while you are working on it.
|
||||
|
||||
|
||||
Becoming a maintainer
|
||||
=====================
|
||||
|
||||
If you want to become an official maintainer, start by helping out.
|
||||
|
||||
Later, when you think you're ready, get in touch with one of the maintainers
|
||||
and they will initiate a vote.
|
||||
|
||||
.. _`Studies have shown`: https://smartbear.com/smartbear/media/pdfs/wp-cc-11-best-practices-of-peer-code-review.pdf
|
||||
.. _`resolve merge conflicts`: https://help.github.com/articles/resolving-a-merge-conflict-using-the-command-line/
|
||||
.. _`Travis CI`: https://travis-ci.org/
|
||||
.. _`Appveyor CI`: https://www.appveyor.com/
|
||||
.. _`.travis.yml`: https://github.com/pypa/pip/blob/master/.travis.yml
|
||||
.. _`appveyor.yml`: https://github.com/pypa/pip/blob/master/appveyor.yml
|
||||
.. _`towncrier`: https://pypi.org/project/towncrier/
|
|
@ -0,0 +1,110 @@
|
|||
===============
|
||||
Getting Started
|
||||
===============
|
||||
|
||||
We’re pleased that you are interested in working on pip.
|
||||
|
||||
This document is meant to get you setup to work on pip and to act as a guide and
|
||||
reference to the development setup. If you face any issues during this
|
||||
process, please `open an issue`_ about it on the issue tracker.
|
||||
|
||||
Development tools
|
||||
=================
|
||||
|
||||
pip uses :pypi:`tox` for testing against multiple different Python environments
|
||||
and ensuring reproducible environments for linting and building documentation.
|
||||
|
||||
For developing pip, you need to install ``tox`` on your system. Often, you can
|
||||
just do ``python -m pip install tox`` to install and use it.
|
||||
|
||||
Running Tests
|
||||
-------------
|
||||
|
||||
pip uses the :pypi:`pytest` test framework, :pypi:`mock` and :pypi:`pretend`
|
||||
for testing. These are automatically installed by tox for running the tests.
|
||||
|
||||
To run tests locally, run:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ tox -e py36
|
||||
|
||||
The example above runs tests against Python 3.6. You can also use other
|
||||
versions like ``py27`` and ``pypy3``.
|
||||
|
||||
``tox`` has been configured to pass any additional arguments it is given to
|
||||
``pytest``. This enables the use of pytest's `rich CLI`_. As an example, you
|
||||
can select tests using the various ways that pytest provides:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ # Using file name
|
||||
$ tox -e py36 -- tests/functional/test_install.py
|
||||
$ # Using markers
|
||||
$ tox -e py36 -- -m unit
|
||||
$ # Using keywords
|
||||
$ tox -e py36 -- -k "install and not wheel"
|
||||
|
||||
Running pip's test suite requires supported version control tools (subversion,
|
||||
bazaar, git, and mercurial) to be installed. If you are missing one of the VCS
|
||||
tools, you can tell pip to skip those tests:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ tox -e py36 -- -k "not svn"
|
||||
$ tox -e py36 -- -k "not (svn or git)"
|
||||
|
||||
Running Linters
|
||||
---------------
|
||||
|
||||
pip uses :pypi:`flake8` and :pypi:`isort` for linting the codebase. These
|
||||
ensure that the codebase is in compliance with :pep:`8` and the imports are
|
||||
consistently ordered and styled.
|
||||
|
||||
To use linters locally, run:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ tox -e lint-py2
|
||||
$ tox -e lint-py3
|
||||
|
||||
The above commands run the linters on Python 2 followed by Python 3.
|
||||
|
||||
.. note::
|
||||
|
||||
Do not silence errors from flake8 with ``# noqa`` comments or otherwise.
|
||||
The only exception to this is silencing unused-import errors for imports
|
||||
related to static type checking as currently `flake8 does not understand
|
||||
PEP 484 type-comments`_.
|
||||
|
||||
Running mypy
|
||||
------------
|
||||
|
||||
pip uses :pypi:`mypy` to run static type analysis, which helps catch certain
|
||||
kinds of bugs. The codebase uses `PEP 484 type-comments`_ due to compatibility
|
||||
requirements with Python 2.7.
|
||||
|
||||
To run the ``mypy`` type checker, run:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ tox -e mypy
|
||||
|
||||
Building Documentation
|
||||
----------------------
|
||||
|
||||
pip's documentation is built using :pypi:`Sphinx`. The documentation is written
|
||||
in reStructuredText.
|
||||
|
||||
To build it locally, run:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ tox -e docs
|
||||
|
||||
The built documentation can be found in the ``docs/build`` folder.
|
||||
|
||||
.. _`open an issue`: https://github.com/pypa/pip/issues/new?title=Trouble+with+pip+development+environment
|
||||
.. _`flake8 does not understand PEP 484 type-comments`: https://gitlab.com/pycqa/flake8/issues/118
|
||||
.. _`PEP 484 type-comments`: https://www.python.org/dev/peps/pep-0484/#suggested-syntax-for-python-2-7-and-straddling-code
|
||||
.. _`rich CLI`: https://docs.pytest.org/en/latest/usage.html#specifying-tests-selecting-tests
|
|
@ -0,0 +1,25 @@
|
|||
Development
|
||||
===========
|
||||
|
||||
pip is a volunteer maintained open source project and we welcome contributions
|
||||
of all forms. The sections below will help you get started with development,
|
||||
testing, and documentation.
|
||||
|
||||
You can also join ``#pypa`` (general packaging discussion and user support) and
|
||||
``#pypa-dev`` (discussion about development of packaging tools) `on Freenode`_,
|
||||
or the `pypa-dev mailing list`_, to ask questions or get involved.
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
getting-started
|
||||
contributing
|
||||
release-process
|
||||
|
||||
.. note::
|
||||
|
||||
pip's development documentation has been rearranged and some older
|
||||
references might be broken.
|
||||
|
||||
.. _`on Freenode`: https://webchat.freenode.net/?channels=%23pypa-dev,pypa
|
||||
.. _`pypa-dev mailing list`: https://groups.google.com/forum/#!forum/pypa-dev
|
|
@ -0,0 +1,103 @@
|
|||
===============
|
||||
Release process
|
||||
===============
|
||||
|
||||
|
||||
Release Cadence
|
||||
===============
|
||||
|
||||
The pip project has a release cadence of releasing whatever is on ``master``
|
||||
every 3 months. This gives users a predictable pattern for when releases
|
||||
are going to happen and prevents locking up improvements for fixes for long
|
||||
periods of time, while still preventing massively fracturing the user base
|
||||
with version numbers.
|
||||
|
||||
Our release months are January, April, July, October. The release date within
|
||||
that month will be up to the release manager for that release. If there are
|
||||
no changes, then that release month is skipped and the next release will be
|
||||
3 month later.
|
||||
|
||||
The release manager may, at their discretion, choose whether or not there
|
||||
will be a pre-release period for a release, and if there is may extend that
|
||||
period into the next month if needed.
|
||||
|
||||
Because releases are made direct from the ``master`` branch, it is essential
|
||||
that ``master`` is always in a releasable state. It is acceptable to merge
|
||||
PRs that partially implement a new feature, but only if the partially
|
||||
implemented version is usable in that state (for example, with reduced
|
||||
functionality or disabled by default). In the case where a merged PR is found
|
||||
to need extra work before being released, the release manager always has the
|
||||
option to back out the partial change prior to a release. The PR can then be
|
||||
reworked and resubmitted for the next release.
|
||||
|
||||
|
||||
Deprecation Policy
|
||||
==================
|
||||
|
||||
Any change to pip that removes or significantly alters user-visible behavior
|
||||
that is described in the pip documentation will be deprecated for a minimum of
|
||||
6 months before the change occurs. Deprecation will take the form of a warning
|
||||
being issued by pip when the feature is used. Longer deprecation periods, or
|
||||
deprecation warnings for behavior changes that would not normally be covered by
|
||||
this policy, are also possible depending on circumstances, but this is at the
|
||||
discretion of the pip developers.
|
||||
|
||||
Note that the documentation is the sole reference for what counts as agreed
|
||||
behavior. If something isn't explicitly mentioned in the documentation, it can
|
||||
be changed without warning, or any deprecation period, in a pip release.
|
||||
However, we are aware that the documentation isn't always complete - PRs that
|
||||
document existing behavior with the intention of covering that behavior with
|
||||
the above deprecation process are always acceptable, and will be considered on
|
||||
their merits.
|
||||
|
||||
.. note::
|
||||
|
||||
pip has a helper function for making deprecation easier for pip maintainers.
|
||||
The supporting documentation can be found in the source code of
|
||||
``pip._internal.utils.deprecation.deprecated``. The function is not a part of
|
||||
pip's public API.
|
||||
|
||||
|
||||
Release Process
|
||||
===============
|
||||
|
||||
Creating a new release
|
||||
----------------------
|
||||
|
||||
#. On the current pip ``master`` branch, generate a new ``AUTHORS.txt`` by
|
||||
running ``invoke generate.authors`` and commit the results.
|
||||
#. On the current pip ``master`` branch, make a new commit which bumps the
|
||||
version in ``pip/__init__.py`` to the release version and adjust the
|
||||
``CHANGES.txt`` file to reflect the current date. The release version should
|
||||
follow a YY.N scheme, where YY is the two digit year, and N is the Nth
|
||||
release within that year.
|
||||
#. On the current pip ``master`` branch, generate a new ``NEWS.rst`` by running
|
||||
``invoke generate.news`` and commit the results.
|
||||
#. Create a signed tag of the ``master`` branch of the form ``YY.N`` using the
|
||||
command ``git tag -s YY.N``.
|
||||
#. Checkout the tag using ``git checkout YY.N`` and create the distribution
|
||||
files using ``python setup.py sdist bdist_wheel``.
|
||||
#. Upload the distribution files to PyPI using twine
|
||||
(``twine upload -s dist/*``). The upload should include GPG signatures of
|
||||
the distribution files.
|
||||
#. Push all of the changes.
|
||||
#. Regenerate the ``get-pip.py`` script by running
|
||||
``invoke generate.installer`` in the get-pip repository, and committing the
|
||||
results.
|
||||
|
||||
Creating a bug-fix release
|
||||
--------------------------
|
||||
|
||||
Sometimes we need to release a bugfix release of the form ``YY.N.Z+1``. In
|
||||
order to create one of these the changes should already be merged into the
|
||||
``master`` branch.
|
||||
|
||||
#. Create a new ``release/YY.N.Z+1`` branch off of the ``YY.N`` tag using the
|
||||
command ``git checkout -b release/YY.N.Z+1 YY.N``.
|
||||
#. Cherry pick the fixed commits off of the ``master`` branch, fixing any
|
||||
conflicts and moving any changelog entries from the development version's
|
||||
changelog section to the ``YY.N.Z+1`` section.
|
||||
#. Push the ``release/YY.N.Z+1`` branch to github and submit a PR for it against
|
||||
the ``master`` branch and wait for the tests to run.
|
||||
#. Once tests run, merge the ``release/YY.N.Z+1`` branch into master, and follow
|
||||
the above release process starting with step 4.
|
|
@ -18,5 +18,5 @@ for installing Python packages.
|
|||
installing
|
||||
user_guide
|
||||
reference/index
|
||||
development
|
||||
development/index
|
||||
news
|
||||
|
|
|
@ -6,10 +6,10 @@ Installation
|
|||
Do I need to install pip?
|
||||
-------------------------
|
||||
|
||||
pip is already installed if you are using Python 2 >=2.7.9 or Python 3 >=3.4
|
||||
downloaded from `python.org <https://www.python.org>`_ or if you are working
|
||||
in a :ref:`Virtual Environment <pypug:Creating and using Virtual Environments>`
|
||||
created by :ref:`pypug:virtualenv` or :ref:`pyvenv <pypug:venv>`.
|
||||
pip is already installed if you are using Python 2 >=2.7.9 or Python 3 >=3.4
|
||||
downloaded from `python.org <https://www.python.org>`_ or if you are working
|
||||
in a :ref:`Virtual Environment <pypug:Creating and using Virtual Environments>`
|
||||
created by :ref:`pypug:virtualenv` or :ref:`pyvenv <pypug:venv>`.
|
||||
Just make sure to :ref:`upgrade pip <Upgrading pip>`.
|
||||
|
||||
|
||||
|
@ -23,7 +23,9 @@ To install pip, securely download `get-pip.py
|
|||
|
||||
curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py
|
||||
|
||||
Inspect ``get-pip.py`` for any malevolence. Then run the following::
|
||||
As when running any script downloaded from the web, ensure that you have
|
||||
reviewed the code and are happy that it works as you expect.
|
||||
Then run the following::
|
||||
|
||||
python get-pip.py
|
||||
|
||||
|
@ -34,7 +36,7 @@ Inspect ``get-pip.py`` for any malevolence. Then run the following::
|
|||
system or another package manager. ``get-pip.py`` does not coordinate with
|
||||
those tools, and may leave your system in an inconsistent state.
|
||||
|
||||
``get-pip.py`` also installs :ref:`pypug:setuptools` [2]_ and :ref:`pypug:wheel`
|
||||
``get-pip.py`` also installs :ref:`pypug:setuptools` [2]_ and :ref:`pypug:wheel`
|
||||
if they are not already. :ref:`pypug:setuptools` is required to install
|
||||
:term:`source distributions <pypug:Source Distribution (or "sdist")>`. Both are
|
||||
required in order to build a :ref:`Wheel cache` (which improves installation
|
||||
|
@ -76,6 +78,11 @@ Install behind a proxy::
|
|||
|
||||
python get-pip.py --proxy="http://[user:passwd@]proxy.server:port"
|
||||
|
||||
``get-pip.py`` can also be used to install a specified combination of ``pip``,
|
||||
``setuptools``, and ``wheel`` using the same requirements syntax as ``pip``::
|
||||
|
||||
python get-pip.py pip==9.0.2 wheel==0.30.0 setuptools==28.8.0
|
||||
|
||||
|
||||
Using Linux Package Managers
|
||||
----------------------------
|
||||
|
@ -99,6 +106,8 @@ On Windows [4]_::
|
|||
python -m pip install -U pip
|
||||
|
||||
|
||||
.. _compatibility-requirements:
|
||||
|
||||
Python and OS Compatibility
|
||||
---------------------------
|
||||
|
||||
|
|
|
@ -72,7 +72,7 @@ Build System Interface
|
|||
======================
|
||||
|
||||
Pip builds packages by invoking the build system. Presently, the only supported
|
||||
build system is ``setuptools``, but in the future, pip will support `PEP517`_
|
||||
build system is ``setuptools``, but in the future, pip will support :pep:`517`
|
||||
which allows projects to specify an alternative build system in a
|
||||
``pyproject.toml`` file. As well as package building, the build system is also
|
||||
invoked to install packages direct from source. This is handled by invoking
|
||||
|
@ -118,7 +118,7 @@ PEP 518 Support
|
|||
|
||||
As of 10.0, pip supports projects declaring dependencies that are required at
|
||||
install time using a ``pyproject.toml`` file, in the form described in
|
||||
`PEP518`_. When building a project, pip will install the required dependencies
|
||||
:pep:`518`. When building a project, pip will install the required dependencies
|
||||
locally, and make them available to the build process.
|
||||
|
||||
When making build requirements available, pip does so in an *isolated
|
||||
|
@ -141,7 +141,7 @@ appropriately.
|
|||
|
||||
**Limitations**:
|
||||
|
||||
* until `PEP517`_ support is added, ``setuptools`` and ``wheel`` **must** be
|
||||
* until :pep:`517` support is added, ``setuptools`` and ``wheel`` **must** be
|
||||
included in the list of build requirements: pip will assume these as default,
|
||||
but will not automatically add them to the list of build requirements if
|
||||
explicitly defined in ``pyproject.toml``.
|
||||
|
@ -149,21 +149,20 @@ appropriately.
|
|||
* the current implementation only support installing build requirements from
|
||||
wheels: this is a technical limitation of the implementation - source
|
||||
installs would require a build step of their own, potentially recursively
|
||||
triggering another `PEP518`_ dependency installation process. The possible
|
||||
triggering another :pep:`518` dependency installation process. The possible
|
||||
unbounded recursion involved was not considered acceptable, and so
|
||||
installation of build dependencies from source has been disabled until a safe
|
||||
resolution of this issue is found.
|
||||
|
||||
* ``pip<18.0`` does not support the use of environment markers and extras, only
|
||||
version specifiers are respected.
|
||||
* ``pip<18.0``: only support installing build requirements from wheels, and
|
||||
does not support the use of environment markers and extras (only version
|
||||
specifiers are respected).
|
||||
|
||||
.. _PEP517: https://www.python.org/dev/peps/pep-0517/
|
||||
.. _PEP518: https://www.python.org/dev/peps/pep-0518/
|
||||
|
||||
Future Developments
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
`PEP426`_ notes that the intention is to add hooks to project metadata in
|
||||
:pep:`426` notes that the intention is to add hooks to project metadata in
|
||||
version 2.1 of the metadata spec, to explicitly define how to build a project
|
||||
from its source. Once this version of the metadata spec is final, pip will
|
||||
migrate to using that interface. At that point, the ``setup.py`` interface
|
||||
|
@ -173,7 +172,6 @@ have migrated.
|
|||
Specifically, applications should *not* expect to rely on there being any form
|
||||
of backward compatibility guarantees around the ``setup.py`` interface.
|
||||
|
||||
.. _PEP426: https://www.python.org/dev/peps/pep-0426/#metabuild-system
|
||||
|
||||
Build Options
|
||||
~~~~~~~~~~~~~
|
||||
|
|
|
@ -38,7 +38,7 @@ each is, in the following order:
|
|||
an error).
|
||||
3. Local file (a sdist or wheel format archive, following the naming
|
||||
conventions for those formats).
|
||||
4. A requirement, as specified in PEP 440.
|
||||
4. A requirement, as specified in :pep:`440`.
|
||||
|
||||
Each item identified is added to the set of requirements to be satisfied by
|
||||
the install.
|
||||
|
@ -237,7 +237,7 @@ Requirement Specifiers
|
|||
pip supports installing from a package index using a :term:`requirement
|
||||
specifier <pypug:Requirement Specifier>`. Generally speaking, a requirement
|
||||
specifier is composed of a project name followed by optional :term:`version
|
||||
specifiers <pypug:Version Specifier>`. `PEP508`_ contains a full specification
|
||||
specifiers <pypug:Version Specifier>`. :pep:`508` contains a full specification
|
||||
of the format of a requirement (``pip`` does not support the ``url_req`` form
|
||||
of specifier at this time).
|
||||
|
||||
|
@ -311,7 +311,7 @@ Pre-release Versions
|
|||
++++++++++++++++++++
|
||||
|
||||
Starting with v1.4, pip will only install stable versions as specified by
|
||||
`pre-releases`_ by default. If a version cannot be parsed as a compliant `PEP440`_
|
||||
`pre-releases`_ by default. If a version cannot be parsed as a compliant :pep:`440`
|
||||
version then it is assumed to be a pre-release.
|
||||
|
||||
If a Requirement specifier includes a pre-release or development version
|
||||
|
@ -484,7 +484,7 @@ pip looks for packages in a number of places, on PyPI (if not disabled via
|
|||
```--no-index```), in the local filesystem, and in any additional repositories
|
||||
specified via ```--find-links``` or ```--index-url```. There is no ordering in
|
||||
the locations that are searched, rather they are all checked, and the "best"
|
||||
match for the requirements (in terms of version number - see `PEP440`_ for
|
||||
match for the requirements (in terms of version number - see :pep:`440` for
|
||||
details) is selected.
|
||||
|
||||
See the :ref:`pip install Examples<pip install Examples>`.
|
||||
|
@ -881,6 +881,3 @@ Examples
|
|||
|
||||
.. [1] This is true with the exception that pip v7.0 and v7.0.1 required quotes
|
||||
around specifiers containing environment markers in requirement files.
|
||||
|
||||
.. _PEP440: https://www.python.org/dev/peps/pep-0440/
|
||||
.. _PEP508: https://www.python.org/dev/peps/pep-0508/
|
||||
|
|
|
@ -51,6 +51,20 @@ For more information and examples, see the :ref:`pip install` reference.
|
|||
|
||||
.. _`Requirements Files`:
|
||||
|
||||
Using a Proxy Server
|
||||
********************
|
||||
|
||||
When installing packages from `PyPI`_, pip requires internet access, which
|
||||
in many corporate environments requires an outbound HTTP proxy server.
|
||||
|
||||
pip can be configured to connect through a proxy server in various ways:
|
||||
|
||||
* using the ``--proxy`` command-line option to specify a proxy in the form
|
||||
``[user:passwd@]proxy.server:port``
|
||||
* using ``proxy`` in a :ref:`config-file`
|
||||
* by setting the standard environment-variables ``http_proxy``, ``https_proxy``
|
||||
and ``no_proxy``.
|
||||
|
||||
Requirements Files
|
||||
******************
|
||||
|
||||
|
@ -176,9 +190,7 @@ Installing from Wheels
|
|||
|
||||
"Wheel" is a built, archive format that can greatly speed installation compared
|
||||
to building and installing from source archives. For more information, see the
|
||||
`Wheel docs <https://wheel.readthedocs.io>`_ ,
|
||||
`PEP427 <https://www.python.org/dev/peps/pep-0427/>`_, and
|
||||
`PEP425 <https://www.python.org/dev/peps/pep-0425/>`_
|
||||
`Wheel docs <https://wheel.readthedocs.io>`_ , :pep:`427`, and :pep:`425`.
|
||||
|
||||
Pip prefers Wheels where they are available. To disable this, use the
|
||||
:ref:`--no-binary <install_--no-binary>` flag for :ref:`pip install`.
|
||||
|
@ -338,13 +350,13 @@ variable ``PIP_CONFIG_FILE``.
|
|||
If multiple configuration files are found by pip then they are combined in
|
||||
the following order:
|
||||
|
||||
1. Firstly the site-wide file is read, then
|
||||
2. The per-user file is read, and finally
|
||||
3. The virtualenv-specific file is read.
|
||||
1. The site-wide file is read
|
||||
2. The per-user file is read
|
||||
3. The virtualenv-specific file is read
|
||||
|
||||
Each file read overrides any values read from previous files, so if the
|
||||
global timeout is specified in both the site-wide file and the per-user file
|
||||
then the latter value is the one that will be used.
|
||||
then the latter value will be used.
|
||||
|
||||
The names of the settings are derived from the long command line option, e.g.
|
||||
if you want to use a different package index (``--index-url``) and set the
|
||||
|
|
|
@ -1 +0,0 @@
|
|||
Introduce a new --prefer-binary flag, to prefer older wheels over newer source packages.
|
|
@ -1 +0,0 @@
|
|||
Dropped support for Python 3.3.
|
|
@ -1 +0,0 @@
|
|||
Adjust path to selfcheck.json - remove virtualenv specific path and honor cache-dir in pip.conf
|
|
@ -0,0 +1,5 @@
|
|||
Allow PEP 508 URL requirements to be used as dependencies.
|
||||
|
||||
As a security measure, pip will raise an exception when installing packages from
|
||||
PyPI if those packages depend on packages not also hosted on PyPI.
|
||||
In the future, PyPI will block uploading packages with such external URL dependencies directly.
|
|
@ -1 +0,0 @@
|
|||
Remove compiled pyo files for wheel packages.
|
|
@ -1,2 +0,0 @@
|
|||
Improve autocompletion function on file name completion after options
|
||||
which have ``<file>``, ``<dir>`` or ``<path>`` as metavar.
|
|
@ -1,2 +0,0 @@
|
|||
Improve autocompletion function on file name completion after options
|
||||
which have ``<file>``, ``<dir>`` or ``<path>`` as metavar.
|
|
@ -1 +0,0 @@
|
|||
Speed up printing of newly installed package versions
|
|
@ -1,4 +0,0 @@
|
|||
Restrict install time dependency warnings to directly-dependant packages
|
||||
|
||||
Warning about the entire package set has resulted in users getting confused as
|
||||
to why pip is printing these warnings.
|
|
@ -1 +0,0 @@
|
|||
Improve handling of PEP 518 build requirements: support environment markers and extras.
|
|
@ -1 +0,0 @@
|
|||
Remove username/password from log message when using index with basic auth
|
|
@ -1 +0,0 @@
|
|||
Clarify that the output of pip show is in RFC-compliant mail header format for people who want to parse the output.
|
|
@ -1 +0,0 @@
|
|||
Improve handling of PEP 518 build requirements: support environment markers and extras.
|
|
@ -1 +0,0 @@
|
|||
Remove trailing os.sep from PATH directories to avoid false negatives
|
|
@ -1 +0,0 @@
|
|||
Fix "pip wheel pip" being blocked by the "don't use pip to modify itself" check
|
|
@ -1 +0,0 @@
|
|||
Fix "pip wheel pip" being blocked by the "don't use pip to modify itself" check
|
|
@ -1 +0,0 @@
|
|||
Improve status message when upgrade is skipped due to only-if-needed strategy
|
|
@ -1 +0,0 @@
|
|||
Add test for PR 5293: Remove trailing os.sep to avoid false negatives
|
|
@ -1,5 +0,0 @@
|
|||
Disable pip's version check (and upgrade message) when installed by a different package manager.
|
||||
|
||||
This works better with Linux distributions where pip's upgrade message may
|
||||
result in users running pip in a manner that modifies files that should be
|
||||
managed by the OS's package manager.
|
|
@ -1 +0,0 @@
|
|||
Fix the revendoring script not to rewrite unrelated imports that merely start with a suspicious-looking prefix.
|
|
@ -1 +0,0 @@
|
|||
Check for file existence and unlink first when clobbering existing files during a wheel install.
|
|
@ -1 +0,0 @@
|
|||
Improve error message to be more specific when no files are found as listed in PKG-INFO.
|
|
@ -1 +0,0 @@
|
|||
Start refusing to install packages with non PEP-518 compliant pyproject.toml
|
|
@ -1,4 +0,0 @@
|
|||
Restrict install time dependency warnings to directly-dependant packages
|
||||
|
||||
Warning about the entire package set has resulted in users getting confused as
|
||||
to why pip is printing these warnings.
|
|
@ -1 +0,0 @@
|
|||
Always read ``pyproject.toml`` as UTF-8. This fixes Unicode handling on Windows and Python 2.
|
|
@ -1 +0,0 @@
|
|||
Start refusing to install packages with non PEP-518 compliant pyproject.toml
|
|
@ -1,4 +0,0 @@
|
|||
Remove the shim for the old get-pip.py location.
|
||||
|
||||
For the past 2 years, it's only been redirecting users to use the newer
|
||||
https://bootstrap.pypa.io/get-pip.py location.
|
|
@ -1 +0,0 @@
|
|||
Switch to a Calendar based versioning scheme.
|
|
@ -1 +0,0 @@
|
|||
Formally document our deprecation process
|
|
@ -1 +0,0 @@
|
|||
Adopt and document NEWS fragment writing style
|
|
@ -1 +0,0 @@
|
|||
Switch to releasing a new, non bug fix version of pip every 3 months.
|
12
setup.cfg
12
setup.cfg
|
@ -1,7 +1,9 @@
|
|||
[isort]
|
||||
skip =
|
||||
_vendor
|
||||
__main__.py
|
||||
.tox,
|
||||
.scratch,
|
||||
_vendor,
|
||||
data
|
||||
multi_line_output = 5
|
||||
known_third_party =
|
||||
pip._vendor
|
||||
|
@ -12,7 +14,11 @@ default_section = THIRDPARTY
|
|||
include_trailing_comma = true
|
||||
|
||||
[flake8]
|
||||
exclude = .tox,.idea,.scratch,*.egg,build,_vendor,data
|
||||
exclude =
|
||||
.tox,
|
||||
.scratch,
|
||||
_vendor,
|
||||
data
|
||||
select = E,W,F
|
||||
|
||||
[mypy]
|
||||
|
|
3
setup.py
3
setup.py
|
@ -3,8 +3,7 @@ import os
|
|||
import re
|
||||
import sys
|
||||
|
||||
from setuptools import setup, find_packages
|
||||
|
||||
from setuptools import find_packages, setup
|
||||
|
||||
here = os.path.abspath(os.path.dirname(__file__))
|
||||
|
||||
|
|
|
@ -1 +1 @@
|
|||
__version__ = "18.0.dev0"
|
||||
__version__ = "18.1.dev0"
|
||||
|
|
|
@ -13,7 +13,7 @@ if __package__ == '':
|
|||
path = os.path.dirname(os.path.dirname(__file__))
|
||||
sys.path.insert(0, path)
|
||||
|
||||
from pip._internal import main as _main # noqa
|
||||
from pip._internal import main as _main # isort:skip # noqa
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(_main())
|
||||
|
|
|
@ -274,15 +274,6 @@ def parseopts(args):
|
|||
return cmd_name, cmd_args
|
||||
|
||||
|
||||
def check_isolated(args):
|
||||
isolated = False
|
||||
|
||||
if "--isolated" in args:
|
||||
isolated = True
|
||||
|
||||
return isolated
|
||||
|
||||
|
||||
def main(args=None):
|
||||
if args is None:
|
||||
args = sys.argv[1:]
|
||||
|
@ -306,5 +297,5 @@ def main(args=None):
|
|||
except locale.Error as e:
|
||||
# setlocale can apparently crash if locale are uninitialized
|
||||
logger.debug("Ignoring error %s when setting locale", e)
|
||||
command = commands_dict[cmd_name](isolated=check_isolated(cmd_args))
|
||||
command = commands_dict[cmd_name](isolated=("--isolated" in cmd_args))
|
||||
return command.main(cmd_args)
|
||||
|
|
|
@ -24,7 +24,7 @@ from pip._internal.status_codes import (
|
|||
ERROR, PREVIOUS_BUILD_DIR_ERROR, SUCCESS, UNKNOWN_ERROR,
|
||||
VIRTUALENV_NOT_FOUND,
|
||||
)
|
||||
from pip._internal.utils.logging import IndentingFormatter
|
||||
from pip._internal.utils.logging import setup_logging
|
||||
from pip._internal.utils.misc import get_prog, normalize_path
|
||||
from pip._internal.utils.outdated import pip_version_check
|
||||
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
|
||||
|
@ -42,7 +42,6 @@ class Command(object):
|
|||
usage = None # type: Optional[str]
|
||||
hidden = False # type: bool
|
||||
ignore_require_venv = False # type: bool
|
||||
log_streams = ("ext://sys.stdout", "ext://sys.stderr")
|
||||
|
||||
def __init__(self, isolated=False):
|
||||
parser_kw = {
|
||||
|
@ -114,89 +113,15 @@ class Command(object):
|
|||
# Set verbosity so that it can be used elsewhere.
|
||||
self.verbosity = options.verbose - options.quiet
|
||||
|
||||
if self.verbosity >= 1:
|
||||
level = "DEBUG"
|
||||
elif self.verbosity == -1:
|
||||
level = "WARNING"
|
||||
elif self.verbosity == -2:
|
||||
level = "ERROR"
|
||||
elif self.verbosity <= -3:
|
||||
level = "CRITICAL"
|
||||
else:
|
||||
level = "INFO"
|
||||
setup_logging(
|
||||
verbosity=self.verbosity,
|
||||
no_color=options.no_color,
|
||||
user_log_file=options.log,
|
||||
)
|
||||
|
||||
# The root logger should match the "console" level *unless* we
|
||||
# specified "--log" to send debug logs to a file.
|
||||
root_level = level
|
||||
if options.log:
|
||||
root_level = "DEBUG"
|
||||
|
||||
logger_class = "pip._internal.utils.logging.ColorizedStreamHandler"
|
||||
handler_class = "pip._internal.utils.logging.BetterRotatingFileHandler"
|
||||
|
||||
logging.config.dictConfig({
|
||||
"version": 1,
|
||||
"disable_existing_loggers": False,
|
||||
"filters": {
|
||||
"exclude_warnings": {
|
||||
"()": "pip._internal.utils.logging.MaxLevelFilter",
|
||||
"level": logging.WARNING,
|
||||
},
|
||||
},
|
||||
"formatters": {
|
||||
"indent": {
|
||||
"()": IndentingFormatter,
|
||||
"format": "%(message)s",
|
||||
},
|
||||
},
|
||||
"handlers": {
|
||||
"console": {
|
||||
"level": level,
|
||||
"class": logger_class,
|
||||
"no_color": options.no_color,
|
||||
"stream": self.log_streams[0],
|
||||
"filters": ["exclude_warnings"],
|
||||
"formatter": "indent",
|
||||
},
|
||||
"console_errors": {
|
||||
"level": "WARNING",
|
||||
"class": logger_class,
|
||||
"no_color": options.no_color,
|
||||
"stream": self.log_streams[1],
|
||||
"formatter": "indent",
|
||||
},
|
||||
"user_log": {
|
||||
"level": "DEBUG",
|
||||
"class": handler_class,
|
||||
"filename": options.log or "/dev/null",
|
||||
"delay": True,
|
||||
"formatter": "indent",
|
||||
},
|
||||
},
|
||||
"root": {
|
||||
"level": root_level,
|
||||
"handlers": list(filter(None, [
|
||||
"console",
|
||||
"console_errors",
|
||||
"user_log" if options.log else None,
|
||||
])),
|
||||
},
|
||||
# Disable any logging besides WARNING unless we have DEBUG level
|
||||
# logging enabled. These use both pip._vendor and the bare names
|
||||
# for the case where someone unbundles our libraries.
|
||||
"loggers": {
|
||||
name: {
|
||||
"level": (
|
||||
"WARNING" if level in ["INFO", "ERROR"] else "DEBUG"
|
||||
)
|
||||
} for name in [
|
||||
"pip._vendor", "distlib", "requests", "urllib3"
|
||||
]
|
||||
},
|
||||
})
|
||||
|
||||
# TODO: try to get these passing down from the command?
|
||||
# without resorting to os.environ to hold these.
|
||||
# TODO: Try to get these passing down from the command?
|
||||
# without resorting to os.environ to hold these.
|
||||
# This also affects isolated builds and it should.
|
||||
|
||||
if options.no_input:
|
||||
os.environ['PIP_NO_INPUT'] = '1'
|
||||
|
@ -212,8 +137,6 @@ class Command(object):
|
|||
)
|
||||
sys.exit(VIRTUALENV_NOT_FOUND)
|
||||
|
||||
original_root_handlers = set(logging.root.handlers)
|
||||
|
||||
try:
|
||||
status = self.run(options, args)
|
||||
# FIXME: all commands should return an exit status
|
||||
|
@ -240,23 +163,27 @@ class Command(object):
|
|||
logger.debug('Exception information:', exc_info=True)
|
||||
|
||||
return ERROR
|
||||
except:
|
||||
except BaseException:
|
||||
logger.critical('Exception:', exc_info=True)
|
||||
|
||||
return UNKNOWN_ERROR
|
||||
finally:
|
||||
# Check if we're using the latest version of pip available
|
||||
if (not options.disable_pip_version_check and not
|
||||
getattr(options, "no_index", False)):
|
||||
with self._build_session(
|
||||
options,
|
||||
retries=0,
|
||||
timeout=min(5, options.timeout)) as session:
|
||||
skip_version_check = (
|
||||
options.disable_pip_version_check or
|
||||
getattr(options, "no_index", False)
|
||||
)
|
||||
if not skip_version_check:
|
||||
session = self._build_session(
|
||||
options,
|
||||
retries=0,
|
||||
timeout=min(5, options.timeout)
|
||||
)
|
||||
with session:
|
||||
pip_version_check(session, options)
|
||||
# Avoid leaking loggers
|
||||
for handler in set(logging.root.handlers) - original_root_handlers:
|
||||
# this method benefits from the Logger class's internal lock
|
||||
logging.root.removeHandler(handler)
|
||||
|
||||
# Shutdown the logging module
|
||||
logging.shutdown()
|
||||
|
||||
return SUCCESS
|
||||
|
||||
|
|
|
@ -11,7 +11,6 @@ from pip._internal.utils.misc import call_subprocess
|
|||
from pip._internal.utils.temp_dir import TempDirectory
|
||||
from pip._internal.utils.ui import open_spinner
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
|
@ -80,10 +79,13 @@ class BuildEnvironment(object):
|
|||
args = [
|
||||
sys.executable, '-m', 'pip', 'install', '--ignore-installed',
|
||||
'--no-user', '--prefix', self.path, '--no-warn-script-location',
|
||||
'--only-binary', ':all:',
|
||||
]
|
||||
if logger.getEffectiveLevel() <= logging.DEBUG:
|
||||
args.append('-v')
|
||||
for format_control in ('no_binary', 'only_binary'):
|
||||
formats = getattr(finder.format_control, format_control)
|
||||
args.extend(('--' + format_control.replace('_', '-'),
|
||||
','.join(sorted(formats or {':none:'}))))
|
||||
if finder.index_urls:
|
||||
args.extend(['-i', finder.index_urls[0]])
|
||||
for extra_index in finder.index_urls[1:]:
|
||||
|
|
|
@ -9,6 +9,7 @@ from pip._internal.exceptions import CommandError
|
|||
from pip._internal.index import FormatControl
|
||||
from pip._internal.operations.prepare import RequirementPreparer
|
||||
from pip._internal.req import RequirementSet
|
||||
from pip._internal.req.req_tracker import RequirementTracker
|
||||
from pip._internal.resolve import Resolver
|
||||
from pip._internal.utils.filesystem import check_path_owner
|
||||
from pip._internal.utils.misc import ensure_dir, normalize_path
|
||||
|
@ -180,7 +181,7 @@ class DownloadCommand(RequirementCommand):
|
|||
)
|
||||
options.cache_dir = None
|
||||
|
||||
with TempDirectory(
|
||||
with RequirementTracker() as req_tracker, TempDirectory(
|
||||
options.build_dir, delete=build_delete, kind="download"
|
||||
) as directory:
|
||||
|
||||
|
@ -204,6 +205,7 @@ class DownloadCommand(RequirementCommand):
|
|||
wheel_download_dir=None,
|
||||
progress_bar=options.progress_bar,
|
||||
build_isolation=options.build_isolation,
|
||||
req_tracker=req_tracker,
|
||||
)
|
||||
|
||||
resolver = Resolver(
|
||||
|
|
|
@ -19,6 +19,7 @@ from pip._internal.locations import distutils_scheme, virtualenv_no_global
|
|||
from pip._internal.operations.check import check_install_conflicts
|
||||
from pip._internal.operations.prepare import RequirementPreparer
|
||||
from pip._internal.req import RequirementSet, install_given_reqs
|
||||
from pip._internal.req.req_tracker import RequirementTracker
|
||||
from pip._internal.resolve import Resolver
|
||||
from pip._internal.status_codes import ERROR
|
||||
from pip._internal.utils.filesystem import check_path_owner
|
||||
|
@ -260,7 +261,7 @@ class InstallCommand(RequirementCommand):
|
|||
)
|
||||
options.cache_dir = None
|
||||
|
||||
with TempDirectory(
|
||||
with RequirementTracker() as req_tracker, TempDirectory(
|
||||
options.build_dir, delete=build_delete, kind="install"
|
||||
) as directory:
|
||||
requirement_set = RequirementSet(
|
||||
|
@ -279,6 +280,7 @@ class InstallCommand(RequirementCommand):
|
|||
wheel_download_dir=None,
|
||||
progress_bar=options.progress_bar,
|
||||
build_isolation=options.build_isolation,
|
||||
req_tracker=req_tracker,
|
||||
)
|
||||
|
||||
resolver = Resolver(
|
||||
|
|
|
@ -2,7 +2,6 @@ from __future__ import absolute_import
|
|||
|
||||
import json
|
||||
import logging
|
||||
import warnings
|
||||
|
||||
from pip._vendor import six
|
||||
from pip._vendor.six.moves import zip_longest
|
||||
|
@ -11,7 +10,6 @@ from pip._internal.basecommand import Command
|
|||
from pip._internal.cmdoptions import index_group, make_option_group
|
||||
from pip._internal.exceptions import CommandError
|
||||
from pip._internal.index import PackageFinder
|
||||
from pip._internal.utils.deprecation import RemovedInPip11Warning
|
||||
from pip._internal.utils.misc import (
|
||||
dist_is_editable, get_installed_distributions,
|
||||
)
|
||||
|
@ -78,9 +76,9 @@ class ListCommand(Command):
|
|||
action='store',
|
||||
dest='list_format',
|
||||
default="columns",
|
||||
choices=('legacy', 'columns', 'freeze', 'json'),
|
||||
choices=('columns', 'freeze', 'json'),
|
||||
help="Select the output format among: columns (default), freeze, "
|
||||
"json, or legacy.",
|
||||
"or json",
|
||||
)
|
||||
|
||||
cmd_opts.add_option(
|
||||
|
@ -123,13 +121,6 @@ class ListCommand(Command):
|
|||
)
|
||||
|
||||
def run(self, options, args):
|
||||
if options.list_format == "legacy":
|
||||
warnings.warn(
|
||||
"The legacy format has been deprecated and will be removed "
|
||||
"in the future.",
|
||||
RemovedInPip11Warning,
|
||||
)
|
||||
|
||||
if options.outdated and options.uptodate:
|
||||
raise CommandError(
|
||||
"Options --outdated and --uptodate cannot be combined.")
|
||||
|
@ -208,30 +199,6 @@ class ListCommand(Command):
|
|||
dist.latest_filetype = typ
|
||||
yield dist
|
||||
|
||||
def output_legacy(self, dist, options):
|
||||
if options.verbose >= 1:
|
||||
return '%s (%s, %s, %s)' % (
|
||||
dist.project_name,
|
||||
dist.version,
|
||||
dist.location,
|
||||
get_installer(dist),
|
||||
)
|
||||
elif dist_is_editable(dist):
|
||||
return '%s (%s, %s)' % (
|
||||
dist.project_name,
|
||||
dist.version,
|
||||
dist.location,
|
||||
)
|
||||
else:
|
||||
return '%s (%s)' % (dist.project_name, dist.version)
|
||||
|
||||
def output_legacy_latest(self, dist, options):
|
||||
return '%s - Latest: %s [%s]' % (
|
||||
self.output_legacy(dist, options),
|
||||
dist.latest_version,
|
||||
dist.latest_filetype,
|
||||
)
|
||||
|
||||
def output_package_listing(self, packages, options):
|
||||
packages = sorted(
|
||||
packages,
|
||||
|
@ -249,12 +216,6 @@ class ListCommand(Command):
|
|||
logger.info("%s==%s", dist.project_name, dist.version)
|
||||
elif options.list_format == 'json':
|
||||
logger.info(format_for_json(packages, options))
|
||||
elif options.list_format == "legacy":
|
||||
for dist in packages:
|
||||
if options.outdated:
|
||||
logger.info(self.output_legacy_latest(dist, options))
|
||||
else:
|
||||
logger.info(self.output_legacy(dist, options))
|
||||
|
||||
def output_package_listing_columns(self, data, header):
|
||||
# insert the header first: we need to know the size of column names
|
||||
|
|
|
@ -10,6 +10,7 @@ from pip._internal.cache import WheelCache
|
|||
from pip._internal.exceptions import CommandError, PreviousBuildDirError
|
||||
from pip._internal.operations.prepare import RequirementPreparer
|
||||
from pip._internal.req import RequirementSet
|
||||
from pip._internal.req.req_tracker import RequirementTracker
|
||||
from pip._internal.resolve import Resolver
|
||||
from pip._internal.utils.temp_dir import TempDirectory
|
||||
from pip._internal.wheel import WheelBuilder
|
||||
|
@ -120,9 +121,10 @@ class WheelCommand(RequirementCommand):
|
|||
build_delete = (not (options.no_clean or options.build_dir))
|
||||
wheel_cache = WheelCache(options.cache_dir, options.format_control)
|
||||
|
||||
with TempDirectory(
|
||||
with RequirementTracker() as req_tracker, TempDirectory(
|
||||
options.build_dir, delete=build_delete, kind="wheel"
|
||||
) as directory:
|
||||
|
||||
requirement_set = RequirementSet(
|
||||
require_hashes=options.require_hashes,
|
||||
)
|
||||
|
@ -140,6 +142,7 @@ class WheelCommand(RequirementCommand):
|
|||
wheel_download_dir=options.wheel_dir,
|
||||
progress_bar=options.progress_bar,
|
||||
build_isolation=options.build_isolation,
|
||||
req_tracker=req_tracker,
|
||||
)
|
||||
|
||||
resolver = Resolver(
|
||||
|
|
|
@ -217,7 +217,7 @@ else:
|
|||
'hh',
|
||||
fcntl.ioctl(fd, termios.TIOCGWINSZ, '12345678')
|
||||
)
|
||||
except:
|
||||
except Exception:
|
||||
return None
|
||||
if cr == (0, 0):
|
||||
return None
|
||||
|
@ -228,7 +228,7 @@ else:
|
|||
fd = os.open(os.ctermid(), os.O_RDONLY)
|
||||
cr = ioctl_GWINSZ(fd)
|
||||
os.close(fd)
|
||||
except:
|
||||
except Exception:
|
||||
pass
|
||||
if not cr:
|
||||
cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
|
||||
|
|
|
@ -9,7 +9,6 @@ import os
|
|||
import posixpath
|
||||
import re
|
||||
import sys
|
||||
import warnings
|
||||
from collections import namedtuple
|
||||
|
||||
from pip._vendor import html5lib, requests, six
|
||||
|
@ -31,7 +30,7 @@ from pip._internal.models.candidate import InstallationCandidate
|
|||
from pip._internal.models.index import PyPI
|
||||
from pip._internal.models.link import Link
|
||||
from pip._internal.pep425tags import get_supported
|
||||
from pip._internal.utils.deprecation import RemovedInPip11Warning
|
||||
from pip._internal.utils.deprecation import deprecated
|
||||
from pip._internal.utils.logging import indent_log
|
||||
from pip._internal.utils.misc import (
|
||||
ARCHIVE_EXTENSIONS, SUPPORTED_EXTENSIONS, cached_property, normalize_path,
|
||||
|
@ -173,10 +172,12 @@ class PackageFinder(object):
|
|||
# # dependency_links value
|
||||
# # FIXME: also, we should track comes_from (i.e., use Link)
|
||||
if self.process_dependency_links:
|
||||
warnings.warn(
|
||||
deprecated(
|
||||
"Dependency Links processing has been deprecated and will be "
|
||||
"removed in a future release.",
|
||||
RemovedInPip11Warning,
|
||||
replacement=None,
|
||||
gone_in="18.2",
|
||||
issue=4187,
|
||||
)
|
||||
self.dependency_links.extend(links)
|
||||
|
||||
|
|
|
@ -5,16 +5,21 @@ class PackageIndex(object):
|
|||
"""Represents a Package Index and provides easier access to endpoints
|
||||
"""
|
||||
|
||||
def __init__(self, url):
|
||||
def __init__(self, url, file_storage_domain):
|
||||
super(PackageIndex, self).__init__()
|
||||
|
||||
self.url = url
|
||||
self.netloc = urllib_parse.urlsplit(url).netloc
|
||||
self.simple_url = self._url_for_path('simple')
|
||||
self.pypi_url = self._url_for_path('pypi')
|
||||
|
||||
def _url_for_path(self, path):
|
||||
# This is part of a temporary hack used to block installs of PyPI
|
||||
# packages which depend on external urls only necessary until PyPI can
|
||||
# block such packages themselves
|
||||
self.file_storage_domain = file_storage_domain
|
||||
|
||||
def url_to_path(self, path):
|
||||
return urllib_parse.urljoin(self.url, path)
|
||||
|
||||
|
||||
PyPI = PackageIndex('https://pypi.org/')
|
||||
PyPI = PackageIndex('https://pypi.org/', 'files.pythonhosted.org')
|
||||
TestPyPI = PackageIndex('https://test.pypi.org/', 'test-files.pythonhosted.org')
|
||||
|
|
|
@ -6,7 +6,6 @@ from collections import namedtuple
|
|||
from pip._vendor.packaging.utils import canonicalize_name
|
||||
|
||||
from pip._internal.operations.prepare import make_abstract_dist
|
||||
|
||||
from pip._internal.utils.misc import get_installed_distributions
|
||||
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
|
||||
|
||||
|
|
|
@ -4,7 +4,6 @@ import collections
|
|||
import logging
|
||||
import os
|
||||
import re
|
||||
import warnings
|
||||
|
||||
from pip._vendor import pkg_resources, six
|
||||
from pip._vendor.packaging.utils import canonicalize_name
|
||||
|
@ -13,7 +12,7 @@ from pip._vendor.pkg_resources import RequirementParseError
|
|||
from pip._internal.exceptions import InstallationError
|
||||
from pip._internal.req import InstallRequirement
|
||||
from pip._internal.req.req_file import COMMENT_RE
|
||||
from pip._internal.utils.deprecation import RemovedInPip11Warning
|
||||
from pip._internal.utils.deprecation import deprecated
|
||||
from pip._internal.utils.misc import (
|
||||
dist_is_editable, get_installed_distributions,
|
||||
)
|
||||
|
@ -216,10 +215,12 @@ class FrozenRequirement(object):
|
|||
'for this package:'
|
||||
)
|
||||
else:
|
||||
warnings.warn(
|
||||
deprecated(
|
||||
"SVN editable detection based on dependency links "
|
||||
"will be dropped in the future.",
|
||||
RemovedInPip11Warning,
|
||||
replacement=None,
|
||||
gone_in="18.2",
|
||||
issue=4187,
|
||||
)
|
||||
comments.append(
|
||||
'# Installing as editable to satisfy requirement %s:' %
|
||||
|
|
|
@ -141,11 +141,12 @@ class RequirementPreparer(object):
|
|||
"""
|
||||
|
||||
def __init__(self, build_dir, download_dir, src_dir, wheel_download_dir,
|
||||
progress_bar, build_isolation):
|
||||
progress_bar, build_isolation, req_tracker):
|
||||
super(RequirementPreparer, self).__init__()
|
||||
|
||||
self.src_dir = src_dir
|
||||
self.build_dir = build_dir
|
||||
self.req_tracker = req_tracker
|
||||
|
||||
# Where still packed archives should be written to. If None, they are
|
||||
# not saved, and are deleted immediately after unpacking.
|
||||
|
@ -293,7 +294,8 @@ class RequirementPreparer(object):
|
|||
(req, exc, req.link)
|
||||
)
|
||||
abstract_dist = make_abstract_dist(req)
|
||||
abstract_dist.prep_for_dist(finder, self.build_isolation)
|
||||
with self.req_tracker.track(req):
|
||||
abstract_dist.prep_for_dist(finder, self.build_isolation)
|
||||
if self._download_should_save:
|
||||
# Make a .zip of the source_dir we already created.
|
||||
if req.link.scheme in vcs.all_schemes:
|
||||
|
@ -319,7 +321,8 @@ class RequirementPreparer(object):
|
|||
req.update_editable(not self._download_should_save)
|
||||
|
||||
abstract_dist = make_abstract_dist(req)
|
||||
abstract_dist.prep_for_dist(finder, self.build_isolation)
|
||||
with self.req_tracker.track(req):
|
||||
abstract_dist.prep_for_dist(finder, self.build_isolation)
|
||||
|
||||
if self._download_should_save:
|
||||
req.archive(self.download_dir)
|
||||
|
|
|
@ -48,7 +48,7 @@ def install_given_reqs(to_install, install_options, global_options=(),
|
|||
*args,
|
||||
**kwargs
|
||||
)
|
||||
except:
|
||||
except Exception:
|
||||
should_rollback = (
|
||||
requirement.conflicts_with and
|
||||
not requirement.install_succeeded
|
||||
|
|
|
@ -8,7 +8,6 @@ import shutil
|
|||
import sys
|
||||
import sysconfig
|
||||
import traceback
|
||||
import warnings
|
||||
import zipfile
|
||||
from distutils.util import change_root
|
||||
from email.parser import FeedParser # type: ignore
|
||||
|
@ -18,8 +17,8 @@ from pip._vendor.packaging import specifiers
|
|||
from pip._vendor.packaging.markers import Marker
|
||||
from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
|
||||
from pip._vendor.packaging.utils import canonicalize_name
|
||||
from pip._vendor.packaging.version import parse as parse_version
|
||||
from pip._vendor.packaging.version import Version
|
||||
from pip._vendor.packaging.version import parse as parse_version
|
||||
from pip._vendor.pkg_resources import RequirementParseError, parse_requirements
|
||||
|
||||
from pip._internal import wheel
|
||||
|
@ -33,10 +32,8 @@ from pip._internal.locations import (
|
|||
PIP_DELETE_MARKER_FILENAME, running_under_virtualenv,
|
||||
)
|
||||
from pip._internal.models.link import Link
|
||||
from pip._internal.models.index import PyPI, TestPyPI
|
||||
from pip._internal.req.req_uninstall import UninstallPathSet
|
||||
from pip._internal.utils.deprecation import (
|
||||
RemovedInPip11Warning, RemovedInPip12Warning,
|
||||
)
|
||||
from pip._internal.utils.hashes import Hashes
|
||||
from pip._internal.utils.logging import indent_log
|
||||
from pip._internal.utils.misc import (
|
||||
|
@ -168,11 +165,19 @@ class InstallRequirement(object):
|
|||
req = Requirement(req)
|
||||
except InvalidRequirement:
|
||||
raise InstallationError("Invalid requirement: '%s'" % req)
|
||||
if req.url:
|
||||
|
||||
domains_not_allowed = [
|
||||
PyPI.file_storage_domain,
|
||||
TestPyPI.file_storage_domain,
|
||||
]
|
||||
if req.url and comes_from.link.netloc in domains_not_allowed:
|
||||
# Explicitly disallow pypi packages that depend on external urls
|
||||
raise InstallationError(
|
||||
"Direct url requirement (like %s) are not allowed for "
|
||||
"dependencies" % req
|
||||
"Packages installed from PyPI cannot depend on packages "
|
||||
"which are not also hosted on PyPI.\n"
|
||||
"%s depends on %s " % (comes_from.name, req)
|
||||
)
|
||||
|
||||
return cls(req, comes_from, isolated=isolated, wheel_cache=wheel_cache)
|
||||
|
||||
@classmethod
|
||||
|
@ -567,44 +572,40 @@ class InstallRequirement(object):
|
|||
specified as per PEP 518 within the package. If `pyproject.toml` is not
|
||||
present, returns None to signify not using the same.
|
||||
"""
|
||||
# If pyproject.toml does not exist, don't do anything.
|
||||
if not os.path.isfile(self.pyproject_toml):
|
||||
return None
|
||||
|
||||
error_template = (
|
||||
"{package} has a pyproject.toml file that does not comply "
|
||||
"with PEP 518: {reason}"
|
||||
)
|
||||
|
||||
with io.open(self.pyproject_toml, encoding="utf-8") as f:
|
||||
pp_toml = pytoml.load(f)
|
||||
|
||||
# Extract the build requirements
|
||||
requires = pp_toml.get("build-system", {}).get("requires", None)
|
||||
# If there is no build-system table, just use setuptools and wheel.
|
||||
if "build-system" not in pp_toml:
|
||||
return ["setuptools", "wheel"]
|
||||
|
||||
template = (
|
||||
"%s does not comply with PEP 518 since pyproject.toml "
|
||||
"does not contain a valid '[build-system].requires' key: %s"
|
||||
)
|
||||
|
||||
if requires is None:
|
||||
logging.warn(template, self, "it is missing.")
|
||||
warnings.warn(
|
||||
"Future versions of pip will reject packages with "
|
||||
"pyproject.toml files that do not comply with PEP 518.",
|
||||
RemovedInPip12Warning,
|
||||
# Specifying the build-system table but not the requires key is invalid
|
||||
build_system = pp_toml["build-system"]
|
||||
if "requires" not in build_system:
|
||||
raise InstallationError(
|
||||
error_template.format(package=self, reason=(
|
||||
"it has a 'build-system' table but not "
|
||||
"'build-system.requires' which is mandatory in the table"
|
||||
))
|
||||
)
|
||||
|
||||
# NOTE: Currently allowing projects to skip this key so that they
|
||||
# can transition to a PEP 518 compliant pyproject.toml or
|
||||
# push to update the PEP.
|
||||
# Come pip 19.0, bring this to compliance with PEP 518.
|
||||
return None
|
||||
else:
|
||||
# Error out if it's not a list of strings
|
||||
is_list_of_str = isinstance(requires, list) and all(
|
||||
isinstance(req, six.string_types) for req in requires
|
||||
)
|
||||
if not is_list_of_str:
|
||||
raise InstallationError(
|
||||
template % (self, "it is not a list of strings.")
|
||||
)
|
||||
# Error out if it's not a list of strings
|
||||
requires = build_system["requires"]
|
||||
if not _is_list_of_str(requires):
|
||||
raise InstallationError(error_template.format(
|
||||
package=self,
|
||||
reason="'build-system.requires' is not a list of strings.",
|
||||
))
|
||||
|
||||
# If control flow reaches here, we're good to go.
|
||||
return requires
|
||||
|
||||
def run_egg_info(self):
|
||||
|
@ -1048,22 +1049,6 @@ class InstallRequirement(object):
|
|||
return install_args
|
||||
|
||||
|
||||
def _strip_postfix(req):
|
||||
"""
|
||||
Strip req postfix ( -dev, 0.2, etc )
|
||||
"""
|
||||
# FIXME: use package_to_requirement?
|
||||
match = re.search(r'^(.*?)(?:-dev|-\d.*)$', req)
|
||||
if match:
|
||||
# Strip off -dev, -0.2, etc.
|
||||
warnings.warn(
|
||||
"#egg cleanup for editable urls will be dropped in the future",
|
||||
RemovedInPip11Warning,
|
||||
)
|
||||
req = match.group(1)
|
||||
return req
|
||||
|
||||
|
||||
def parse_editable(editable_req):
|
||||
"""Parses an editable requirement into:
|
||||
- a requirement name
|
||||
|
@ -1128,7 +1113,7 @@ def parse_editable(editable_req):
|
|||
"Could not detect requirement name for '%s', please specify one "
|
||||
"with #egg=your_package_name" % editable_req
|
||||
)
|
||||
return _strip_postfix(package_name), url, None
|
||||
return package_name, url, None
|
||||
|
||||
|
||||
def deduce_helpful_msg(req):
|
||||
|
@ -1156,3 +1141,10 @@ def deduce_helpful_msg(req):
|
|||
else:
|
||||
msg += " File '%s' does not exist." % (req)
|
||||
return msg
|
||||
|
||||
|
||||
def _is_list_of_str(obj):
|
||||
return (
|
||||
isinstance(obj, list) and
|
||||
all(isinstance(item, six.string_types) for item in obj)
|
||||
)
|
||||
|
|
|
@ -0,0 +1,76 @@
|
|||
from __future__ import absolute_import
|
||||
|
||||
import contextlib
|
||||
import errno
|
||||
import hashlib
|
||||
import logging
|
||||
import os
|
||||
|
||||
from pip._internal.utils.temp_dir import TempDirectory
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class RequirementTracker(object):
|
||||
|
||||
def __init__(self):
|
||||
self._root = os.environ.get('PIP_REQ_TRACKER')
|
||||
if self._root is None:
|
||||
self._temp_dir = TempDirectory(delete=False, kind='req-tracker')
|
||||
self._temp_dir.create()
|
||||
self._root = os.environ['PIP_REQ_TRACKER'] = self._temp_dir.path
|
||||
logger.debug('Created requirements tracker %r', self._root)
|
||||
else:
|
||||
self._temp_dir = None
|
||||
logger.debug('Re-using requirements tracker %r', self._root)
|
||||
self._entries = set()
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
self.cleanup()
|
||||
|
||||
def _entry_path(self, link):
|
||||
hashed = hashlib.sha224(link.url_without_fragment.encode()).hexdigest()
|
||||
return os.path.join(self._root, hashed)
|
||||
|
||||
def add(self, req):
|
||||
link = req.link
|
||||
info = str(req)
|
||||
entry_path = self._entry_path(link)
|
||||
try:
|
||||
with open(entry_path) as fp:
|
||||
# Error, there's already a build in progress.
|
||||
raise LookupError('%s is already being built: %s'
|
||||
% (link, fp.read()))
|
||||
except IOError as e:
|
||||
if e.errno != errno.ENOENT:
|
||||
raise
|
||||
assert req not in self._entries
|
||||
with open(entry_path, 'w') as fp:
|
||||
fp.write(info)
|
||||
self._entries.add(req)
|
||||
logger.debug('Added %s to build tracker %r', req, self._root)
|
||||
|
||||
def remove(self, req):
|
||||
link = req.link
|
||||
self._entries.remove(req)
|
||||
os.unlink(self._entry_path(link))
|
||||
logger.debug('Removed %s from build tracker %r', req, self._root)
|
||||
|
||||
def cleanup(self):
|
||||
for req in set(self._entries):
|
||||
self.remove(req)
|
||||
remove = self._temp_dir is not None
|
||||
if remove:
|
||||
self._temp_dir.cleanup()
|
||||
logger.debug('%s build tracker %r',
|
||||
'Removed' if remove else 'Cleaned',
|
||||
self._root)
|
||||
|
||||
@contextlib.contextmanager
|
||||
def track(self, req):
|
||||
self.add(req)
|
||||
yield
|
||||
self.remove(req)
|
|
@ -120,6 +120,8 @@ def compress_for_output_listing(paths):
|
|||
folders.add(os.path.dirname(path))
|
||||
files.add(path)
|
||||
|
||||
_normcased_files = set(map(os.path.normcase, files))
|
||||
|
||||
folders = compact(folders)
|
||||
|
||||
# This walks the tree using os.walk to not miss extra folders
|
||||
|
@ -130,8 +132,9 @@ def compress_for_output_listing(paths):
|
|||
if fname.endswith(".pyc"):
|
||||
continue
|
||||
|
||||
file_ = os.path.normcase(os.path.join(dirpath, fname))
|
||||
if os.path.isfile(file_) and file_ not in files:
|
||||
file_ = os.path.join(dirpath, fname)
|
||||
if (os.path.isfile(file_) and
|
||||
os.path.normcase(file_) not in _normcased_files):
|
||||
# We are skipping this file. Add it to the set.
|
||||
will_skip.add(file_)
|
||||
|
||||
|
|
|
@ -18,7 +18,6 @@ from pip._internal.exceptions import (
|
|||
BestVersionAlreadyInstalled, DistributionNotFound, HashError, HashErrors,
|
||||
UnsupportedPythonVersion,
|
||||
)
|
||||
|
||||
from pip._internal.req.req_install import InstallRequirement
|
||||
from pip._internal.utils.logging import indent_log
|
||||
from pip._internal.utils.misc import dist_in_usersite, ensure_dir
|
||||
|
|
|
@ -6,72 +6,84 @@ from __future__ import absolute_import
|
|||
import logging
|
||||
import warnings
|
||||
|
||||
from pip._vendor.packaging.version import parse
|
||||
|
||||
from pip import __version__ as current_version
|
||||
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
|
||||
|
||||
if MYPY_CHECK_RUNNING:
|
||||
from typing import Any # noqa: F401
|
||||
from typing import Any, Optional # noqa: F401
|
||||
|
||||
|
||||
class PipDeprecationWarning(Warning):
|
||||
pass
|
||||
|
||||
|
||||
class Pending(object):
|
||||
pass
|
||||
|
||||
|
||||
class RemovedInPip11Warning(PipDeprecationWarning):
|
||||
pass
|
||||
|
||||
|
||||
class RemovedInPip12Warning(PipDeprecationWarning, Pending):
|
||||
pass
|
||||
_original_showwarning = None # type: Any
|
||||
|
||||
|
||||
# Warnings <-> Logging Integration
|
||||
|
||||
|
||||
_warnings_showwarning = None # type: Any
|
||||
|
||||
|
||||
def _showwarning(message, category, filename, lineno, file=None, line=None):
|
||||
if file is not None:
|
||||
if _warnings_showwarning is not None:
|
||||
_warnings_showwarning(
|
||||
if _original_showwarning is not None:
|
||||
_original_showwarning(
|
||||
message, category, filename, lineno, file, line,
|
||||
)
|
||||
elif issubclass(category, PipDeprecationWarning):
|
||||
# We use a specially named logger which will handle all of the
|
||||
# deprecation messages for pip.
|
||||
logger = logging.getLogger("pip._internal.deprecations")
|
||||
logger.warning(message)
|
||||
else:
|
||||
if issubclass(category, PipDeprecationWarning):
|
||||
# We use a specially named logger which will handle all of the
|
||||
# deprecation messages for pip.
|
||||
logger = logging.getLogger("pip._internal.deprecations")
|
||||
|
||||
# This is purposely using the % formatter here instead of letting
|
||||
# the logging module handle the interpolation. This is because we
|
||||
# want it to appear as if someone typed this entire message out.
|
||||
log_message = "DEPRECATION: %s" % message
|
||||
|
||||
# PipDeprecationWarnings that are Pending still have at least 2
|
||||
# versions to go until they are removed so they can just be
|
||||
# warnings. Otherwise, they will be removed in the very next
|
||||
# version of pip. We want these to be more obvious so we use the
|
||||
# ERROR logging level.
|
||||
if issubclass(category, Pending):
|
||||
logger.warning(log_message)
|
||||
else:
|
||||
logger.error(log_message)
|
||||
else:
|
||||
_warnings_showwarning(
|
||||
message, category, filename, lineno, file, line,
|
||||
)
|
||||
_original_showwarning(
|
||||
message, category, filename, lineno, file, line,
|
||||
)
|
||||
|
||||
|
||||
def install_warning_logger():
|
||||
# Enable our Deprecation Warnings
|
||||
warnings.simplefilter("default", PipDeprecationWarning, append=True)
|
||||
|
||||
global _warnings_showwarning
|
||||
global _original_showwarning
|
||||
|
||||
if _warnings_showwarning is None:
|
||||
_warnings_showwarning = warnings.showwarning
|
||||
if _original_showwarning is None:
|
||||
_original_showwarning = warnings.showwarning
|
||||
warnings.showwarning = _showwarning
|
||||
|
||||
|
||||
def deprecated(reason, replacement, gone_in, issue=None):
|
||||
# type: (str, Optional[str], Optional[str], Optional[int]) -> None
|
||||
"""Helper to deprecate existing functionality.
|
||||
|
||||
reason:
|
||||
Textual reason shown to the user about why this functionality has
|
||||
been deprecated.
|
||||
replacement:
|
||||
Textual suggestion shown to the user about what alternative
|
||||
functionality they can use.
|
||||
gone_in:
|
||||
The version of pip does this functionality should get removed in.
|
||||
Raises errors if pip's current version is greater than or equal to
|
||||
this.
|
||||
issue:
|
||||
Issue number on the tracker that would serve as a useful place for
|
||||
users to find related discussion and provide feedback.
|
||||
|
||||
Always pass replacement, gone_in and issue as keyword arguments for clarity
|
||||
at the call site.
|
||||
"""
|
||||
|
||||
# Construct a nice message.
|
||||
# This is purposely eagerly formatted as we want it to appear as if someone
|
||||
# typed this entire message out.
|
||||
message = "DEPRECATION: " + reason
|
||||
if replacement is not None:
|
||||
message += " A possible replacement is {}.".format(replacement)
|
||||
if issue is not None:
|
||||
url = "https://github.com/pypa/pip/issues/" + str(issue)
|
||||
message += " You can find discussion regarding this at {}.".format(url)
|
||||
|
||||
# Raise as an error if it has to be removed.
|
||||
if gone_in is not None and parse(current_version) >= parse(gone_in):
|
||||
raise PipDeprecationWarning(message)
|
||||
warnings.warn(message, category=PipDeprecationWarning, stacklevel=2)
|
||||
|
|
|
@ -130,3 +130,96 @@ class MaxLevelFilter(logging.Filter):
|
|||
|
||||
def filter(self, record):
|
||||
return record.levelno < self.level
|
||||
|
||||
|
||||
def setup_logging(verbosity, no_color, user_log_file):
|
||||
"""Configures and sets up all of the logging
|
||||
"""
|
||||
|
||||
# Determine the level to be logging at.
|
||||
if verbosity >= 1:
|
||||
level = "DEBUG"
|
||||
elif verbosity == -1:
|
||||
level = "WARNING"
|
||||
elif verbosity == -2:
|
||||
level = "ERROR"
|
||||
elif verbosity <= -3:
|
||||
level = "CRITICAL"
|
||||
else:
|
||||
level = "INFO"
|
||||
|
||||
# The "root" logger should match the "console" level *unless* we also need
|
||||
# to log to a user log file.
|
||||
include_user_log = user_log_file is not None
|
||||
if include_user_log:
|
||||
additional_log_file = user_log_file
|
||||
root_level = "DEBUG"
|
||||
else:
|
||||
additional_log_file = "/dev/null"
|
||||
root_level = level
|
||||
|
||||
# Disable any logging besides WARNING unless we have DEBUG level logging
|
||||
# enabled for vendored libraries.
|
||||
vendored_log_level = "WARNING" if level in ["INFO", "ERROR"] else "DEBUG"
|
||||
|
||||
# Shorthands for clarity
|
||||
log_streams = {
|
||||
"stdout": "ext://sys.stdout",
|
||||
"stderr": "ext://sys.stderr",
|
||||
}
|
||||
handler_classes = {
|
||||
"stream": "pip._internal.utils.logging.ColorizedStreamHandler",
|
||||
"file": "pip._internal.utils.logging.BetterRotatingFileHandler",
|
||||
}
|
||||
|
||||
logging.config.dictConfig({
|
||||
"version": 1,
|
||||
"disable_existing_loggers": False,
|
||||
"filters": {
|
||||
"exclude_warnings": {
|
||||
"()": "pip._internal.utils.logging.MaxLevelFilter",
|
||||
"level": logging.WARNING,
|
||||
},
|
||||
},
|
||||
"formatters": {
|
||||
"indent": {
|
||||
"()": IndentingFormatter,
|
||||
"format": "%(message)s",
|
||||
},
|
||||
},
|
||||
"handlers": {
|
||||
"console": {
|
||||
"level": level,
|
||||
"class": handler_classes["stream"],
|
||||
"no_color": no_color,
|
||||
"stream": log_streams["stdout"],
|
||||
"filters": ["exclude_warnings"],
|
||||
"formatter": "indent",
|
||||
},
|
||||
"console_errors": {
|
||||
"level": "WARNING",
|
||||
"class": handler_classes["stream"],
|
||||
"no_color": no_color,
|
||||
"stream": log_streams["stderr"],
|
||||
"formatter": "indent",
|
||||
},
|
||||
"user_log": {
|
||||
"level": "DEBUG",
|
||||
"class": handler_classes["file"],
|
||||
"filename": additional_log_file,
|
||||
"delay": True,
|
||||
"formatter": "indent",
|
||||
},
|
||||
},
|
||||
"root": {
|
||||
"level": root_level,
|
||||
"handlers": ["console", "console_errors"] + (
|
||||
["user_log"] if include_user_log else []
|
||||
),
|
||||
},
|
||||
"loggers": {
|
||||
"pip._vendor": {
|
||||
"level": vendored_log_level
|
||||
}
|
||||
},
|
||||
})
|
||||
|
|
|
@ -58,14 +58,14 @@ class SelfCheckState(object):
|
|||
separators=(",", ":"))
|
||||
|
||||
|
||||
def pip_installed_by_pip():
|
||||
"""Checks whether pip was installed by pip
|
||||
def was_installed_by_pip(pkg):
|
||||
"""Checks whether pkg was installed by pip
|
||||
|
||||
This is used not to display the upgrade message when pip is in fact
|
||||
installed by system package manager, such as dnf on Fedora.
|
||||
"""
|
||||
try:
|
||||
dist = pkg_resources.get_distribution('pip')
|
||||
dist = pkg_resources.get_distribution(pkg)
|
||||
return (dist.has_metadata('INSTALLER') and
|
||||
'pip' in dist.get_metadata_lines('INSTALLER'))
|
||||
except pkg_resources.DistributionNotFound:
|
||||
|
@ -125,7 +125,7 @@ def pip_version_check(session, options):
|
|||
# Determine if our pypi_version is older
|
||||
if (pip_version < remote_version and
|
||||
pip_version.base_version != remote_version.base_version and
|
||||
pip_installed_by_pip()):
|
||||
was_installed_by_pip('pip')):
|
||||
# Advise "python -m pip" on Windows to avoid issues
|
||||
# with overwriting pip.exe.
|
||||
if WINDOWS:
|
||||
|
|
|
@ -200,12 +200,6 @@ class VersionControl(object):
|
|||
drive, tail = os.path.splitdrive(repo)
|
||||
return repo.startswith(os.path.sep) or drive
|
||||
|
||||
# See issue #1083 for why this method was introduced:
|
||||
# https://github.com/pypa/pip/issues/1083
|
||||
def translate_egg_surname(self, surname):
|
||||
# For example, Django has branches of the form "stable/1.7.x".
|
||||
return surname.replace('/', '_')
|
||||
|
||||
def export(self, location):
|
||||
"""
|
||||
Export the repository at the url to the destination location
|
||||
|
@ -213,32 +207,59 @@ class VersionControl(object):
|
|||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def get_url_rev(self):
|
||||
def get_netloc_and_auth(self, netloc):
|
||||
"""
|
||||
Returns the correct repository URL and revision by parsing the given
|
||||
repository URL
|
||||
Parse the repository URL's netloc, and return the new netloc to use
|
||||
along with auth information.
|
||||
|
||||
This is mainly for the Subversion class to override, so that auth
|
||||
information can be provided via the --username and --password options
|
||||
instead of through the URL. For other subclasses like Git without
|
||||
such an option, auth information must stay in the URL.
|
||||
|
||||
Returns: (netloc, (username, password)).
|
||||
"""
|
||||
return netloc, (None, None)
|
||||
|
||||
def get_url_rev_and_auth(self, url):
|
||||
"""
|
||||
Parse the repository URL to use, and return the URL, revision,
|
||||
and auth info to use.
|
||||
|
||||
Returns: (url, rev, (username, password)).
|
||||
"""
|
||||
error_message = (
|
||||
"Sorry, '%s' is a malformed VCS url. "
|
||||
"The format is <vcs>+<protocol>://<url>, "
|
||||
"e.g. svn+http://myrepo/svn/MyApp#egg=MyApp"
|
||||
)
|
||||
assert '+' in self.url, error_message % self.url
|
||||
url = self.url.split('+', 1)[1]
|
||||
assert '+' in url, error_message % url
|
||||
url = url.split('+', 1)[1]
|
||||
scheme, netloc, path, query, frag = urllib_parse.urlsplit(url)
|
||||
netloc, user_pass = self.get_netloc_and_auth(netloc)
|
||||
rev = None
|
||||
if '@' in path:
|
||||
path, rev = path.rsplit('@', 1)
|
||||
url = urllib_parse.urlunsplit((scheme, netloc, path, query, ''))
|
||||
return url, rev
|
||||
return url, rev, user_pass
|
||||
|
||||
def get_info(self, location):
|
||||
def make_rev_args(self, username, password):
|
||||
"""
|
||||
Returns (url, revision), where both are strings
|
||||
Return the RevOptions "extra arguments" to use in obtain().
|
||||
"""
|
||||
assert not location.rstrip('/').endswith(self.dirname), \
|
||||
'Bad directory: %s' % location
|
||||
return self.get_url(location), self.get_revision(location)
|
||||
return []
|
||||
|
||||
def get_url_rev_options(self, url):
|
||||
"""
|
||||
Return the URL and RevOptions object to use in obtain() and in
|
||||
some cases export(), as a tuple (url, rev_options).
|
||||
"""
|
||||
url, rev, user_pass = self.get_url_rev_and_auth(url)
|
||||
username, password = user_pass
|
||||
extra_args = self.make_rev_args(username, password)
|
||||
rev_options = self.make_rev_options(rev, extra_args=extra_args)
|
||||
|
||||
return url, rev_options
|
||||
|
||||
def normalize_url(self, url):
|
||||
"""
|
||||
|
@ -253,10 +274,14 @@ class VersionControl(object):
|
|||
"""
|
||||
return (self.normalize_url(url1) == self.normalize_url(url2))
|
||||
|
||||
def obtain(self, dest):
|
||||
def fetch_new(self, dest, url, rev_options):
|
||||
"""
|
||||
Called when installing or updating an editable package, takes the
|
||||
source path of the checkout.
|
||||
Fetch a revision from a repository, in the case that this is the
|
||||
first fetch from the repository.
|
||||
|
||||
Args:
|
||||
dest: the directory to fetch the repository to.
|
||||
rev_options: a RevOptions object.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
|
@ -288,94 +313,95 @@ class VersionControl(object):
|
|||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def check_destination(self, dest, url, rev_options):
|
||||
def obtain(self, dest):
|
||||
"""
|
||||
Prepare a location to receive a checkout/clone.
|
||||
|
||||
Return True if the location is ready for (and requires) a
|
||||
checkout/clone, False otherwise.
|
||||
Install or update in editable mode the package represented by this
|
||||
VersionControl object.
|
||||
|
||||
Args:
|
||||
rev_options: a RevOptions object.
|
||||
dest: the repository directory in which to install or update.
|
||||
"""
|
||||
checkout = True
|
||||
prompt = False
|
||||
rev_display = rev_options.to_display()
|
||||
if os.path.exists(dest):
|
||||
checkout = False
|
||||
if os.path.exists(os.path.join(dest, self.dirname)):
|
||||
existing_url = self.get_url(dest)
|
||||
if self.compare_urls(existing_url, url):
|
||||
logger.debug(
|
||||
'%s in %s exists, and has correct URL (%s)',
|
||||
self.repo_name.title(),
|
||||
display_path(dest),
|
||||
url,
|
||||
)
|
||||
if not self.is_commit_id_equal(dest, rev_options.rev):
|
||||
logger.info(
|
||||
'Updating %s %s%s',
|
||||
display_path(dest),
|
||||
self.repo_name,
|
||||
rev_display,
|
||||
)
|
||||
self.update(dest, rev_options)
|
||||
else:
|
||||
logger.info(
|
||||
'Skipping because already up-to-date.')
|
||||
else:
|
||||
logger.warning(
|
||||
'%s %s in %s exists with URL %s',
|
||||
self.name,
|
||||
self.repo_name,
|
||||
display_path(dest),
|
||||
existing_url,
|
||||
)
|
||||
prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ',
|
||||
('s', 'i', 'w', 'b'))
|
||||
else:
|
||||
logger.warning(
|
||||
'Directory %s already exists, and is not a %s %s.',
|
||||
dest,
|
||||
self.name,
|
||||
self.repo_name,
|
||||
)
|
||||
prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b'))
|
||||
if prompt:
|
||||
logger.warning(
|
||||
'The plan is to install the %s repository %s',
|
||||
self.name,
|
||||
url,
|
||||
)
|
||||
response = ask_path_exists('What to do? %s' % prompt[0],
|
||||
prompt[1])
|
||||
url, rev_options = self.get_url_rev_options(self.url)
|
||||
|
||||
if response == 's':
|
||||
logger.info(
|
||||
'Switching %s %s to %s%s',
|
||||
self.repo_name,
|
||||
if not os.path.exists(dest):
|
||||
self.fetch_new(dest, url, rev_options)
|
||||
return
|
||||
|
||||
rev_display = rev_options.to_display()
|
||||
if self.is_repository_directory(dest):
|
||||
existing_url = self.get_url(dest)
|
||||
if self.compare_urls(existing_url, url):
|
||||
logger.debug(
|
||||
'%s in %s exists, and has correct URL (%s)',
|
||||
self.repo_name.title(),
|
||||
display_path(dest),
|
||||
url,
|
||||
rev_display,
|
||||
)
|
||||
self.switch(dest, url, rev_options)
|
||||
elif response == 'i':
|
||||
# do nothing
|
||||
pass
|
||||
elif response == 'w':
|
||||
logger.warning('Deleting %s', display_path(dest))
|
||||
rmtree(dest)
|
||||
checkout = True
|
||||
elif response == 'b':
|
||||
dest_dir = backup_dir(dest)
|
||||
logger.warning(
|
||||
'Backing up %s to %s', display_path(dest), dest_dir,
|
||||
)
|
||||
shutil.move(dest, dest_dir)
|
||||
checkout = True
|
||||
elif response == 'a':
|
||||
sys.exit(-1)
|
||||
return checkout
|
||||
if not self.is_commit_id_equal(dest, rev_options.rev):
|
||||
logger.info(
|
||||
'Updating %s %s%s',
|
||||
display_path(dest),
|
||||
self.repo_name,
|
||||
rev_display,
|
||||
)
|
||||
self.update(dest, rev_options)
|
||||
else:
|
||||
logger.info('Skipping because already up-to-date.')
|
||||
return
|
||||
|
||||
logger.warning(
|
||||
'%s %s in %s exists with URL %s',
|
||||
self.name,
|
||||
self.repo_name,
|
||||
display_path(dest),
|
||||
existing_url,
|
||||
)
|
||||
prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ',
|
||||
('s', 'i', 'w', 'b'))
|
||||
else:
|
||||
logger.warning(
|
||||
'Directory %s already exists, and is not a %s %s.',
|
||||
dest,
|
||||
self.name,
|
||||
self.repo_name,
|
||||
)
|
||||
prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b'))
|
||||
|
||||
logger.warning(
|
||||
'The plan is to install the %s repository %s',
|
||||
self.name,
|
||||
url,
|
||||
)
|
||||
response = ask_path_exists('What to do? %s' % prompt[0], prompt[1])
|
||||
|
||||
if response == 'a':
|
||||
sys.exit(-1)
|
||||
|
||||
if response == 'w':
|
||||
logger.warning('Deleting %s', display_path(dest))
|
||||
rmtree(dest)
|
||||
self.fetch_new(dest, url, rev_options)
|
||||
return
|
||||
|
||||
if response == 'b':
|
||||
dest_dir = backup_dir(dest)
|
||||
logger.warning(
|
||||
'Backing up %s to %s', display_path(dest), dest_dir,
|
||||
)
|
||||
shutil.move(dest, dest_dir)
|
||||
self.fetch_new(dest, url, rev_options)
|
||||
return
|
||||
|
||||
# Do nothing if the response is "i".
|
||||
if response == 's':
|
||||
logger.info(
|
||||
'Switching %s %s to %s%s',
|
||||
self.repo_name,
|
||||
display_path(dest),
|
||||
url,
|
||||
rev_display,
|
||||
)
|
||||
self.switch(dest, url, rev_options)
|
||||
|
||||
def unpack(self, location):
|
||||
"""
|
||||
|
@ -398,7 +424,6 @@ class VersionControl(object):
|
|||
def get_url(self, location):
|
||||
"""
|
||||
Return the url used at location
|
||||
Used in get_info or check_destination
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
|
@ -435,17 +460,26 @@ class VersionControl(object):
|
|||
else:
|
||||
raise # re-raise exception if a different error occurred
|
||||
|
||||
@classmethod
|
||||
def is_repository_directory(cls, path):
|
||||
"""
|
||||
Return whether a directory path is a repository directory.
|
||||
"""
|
||||
logger.debug('Checking in %s for %s (%s)...',
|
||||
path, cls.dirname, cls.name)
|
||||
return os.path.exists(os.path.join(path, cls.dirname))
|
||||
|
||||
@classmethod
|
||||
def controls_location(cls, location):
|
||||
"""
|
||||
Check if a location is controlled by the vcs.
|
||||
It is meant to be overridden to implement smarter detection
|
||||
mechanisms for specific vcs.
|
||||
|
||||
This can do more than is_repository_directory() alone. For example,
|
||||
the Git override checks that Git is actually available.
|
||||
"""
|
||||
logger.debug('Checking in %s for %s (%s)...',
|
||||
location, cls.dirname, cls.name)
|
||||
path = os.path.join(location, cls.dirname)
|
||||
return os.path.exists(path)
|
||||
return cls.is_repository_directory(location)
|
||||
|
||||
|
||||
def get_src_requirement(dist, location):
|
||||
|
|
|
@ -48,6 +48,17 @@ class Bazaar(VersionControl):
|
|||
cwd=temp_dir.path, show_stdout=False,
|
||||
)
|
||||
|
||||
def fetch_new(self, dest, url, rev_options):
|
||||
rev_display = rev_options.to_display()
|
||||
logger.info(
|
||||
'Checking out %s%s to %s',
|
||||
url,
|
||||
rev_display,
|
||||
display_path(dest),
|
||||
)
|
||||
cmd_args = ['branch', '-q'] + rev_options.to_args() + [url, dest]
|
||||
self.run_command(cmd_args)
|
||||
|
||||
def switch(self, dest, url, rev_options):
|
||||
self.run_command(['switch', url], cwd=dest)
|
||||
|
||||
|
@ -55,26 +66,12 @@ class Bazaar(VersionControl):
|
|||
cmd_args = ['pull', '-q'] + rev_options.to_args()
|
||||
self.run_command(cmd_args, cwd=dest)
|
||||
|
||||
def obtain(self, dest):
|
||||
url, rev = self.get_url_rev()
|
||||
rev_options = self.make_rev_options(rev)
|
||||
if self.check_destination(dest, url, rev_options):
|
||||
rev_display = rev_options.to_display()
|
||||
logger.info(
|
||||
'Checking out %s%s to %s',
|
||||
url,
|
||||
rev_display,
|
||||
display_path(dest),
|
||||
)
|
||||
cmd_args = ['branch', '-q'] + rev_options.to_args() + [url, dest]
|
||||
self.run_command(cmd_args)
|
||||
|
||||
def get_url_rev(self):
|
||||
def get_url_rev_and_auth(self, url):
|
||||
# hotfix the URL scheme after removing bzr+ from bzr+ssh:// readd it
|
||||
url, rev = super(Bazaar, self).get_url_rev()
|
||||
url, rev, user_pass = super(Bazaar, self).get_url_rev_and_auth(url)
|
||||
if url.startswith('ssh://'):
|
||||
url = 'bzr+' + url
|
||||
return url, rev
|
||||
return url, rev, user_pass
|
||||
|
||||
def get_url(self, location):
|
||||
urls = self.run_command(['info'], show_stdout=False, cwd=location)
|
||||
|
|
|
@ -155,6 +155,33 @@ class Git(VersionControl):
|
|||
|
||||
return self.get_revision(dest) == name
|
||||
|
||||
def fetch_new(self, dest, url, rev_options):
|
||||
rev_display = rev_options.to_display()
|
||||
logger.info(
|
||||
'Cloning %s%s to %s', url, rev_display, display_path(dest),
|
||||
)
|
||||
self.run_command(['clone', '-q', url, dest])
|
||||
|
||||
if rev_options.rev:
|
||||
# Then a specific revision was requested.
|
||||
rev_options = self.check_rev_options(dest, rev_options)
|
||||
# Only do a checkout if the current commit id doesn't match
|
||||
# the requested revision.
|
||||
if not self.is_commit_id_equal(dest, rev_options.rev):
|
||||
rev = rev_options.rev
|
||||
# Only fetch the revision if it's a ref
|
||||
if rev.startswith('refs/'):
|
||||
self.run_command(
|
||||
['fetch', '-q', url] + rev_options.to_args(),
|
||||
cwd=dest,
|
||||
)
|
||||
# Change the revision to the SHA of the ref we fetched
|
||||
rev = 'FETCH_HEAD'
|
||||
self.run_command(['checkout', '-q', rev], cwd=dest)
|
||||
|
||||
#: repo may contain submodules
|
||||
self.update_submodules(dest)
|
||||
|
||||
def switch(self, dest, url, rev_options):
|
||||
self.run_command(['config', 'remote.origin.url', url], cwd=dest)
|
||||
cmd_args = ['checkout', '-q'] + rev_options.to_args()
|
||||
|
@ -176,35 +203,6 @@ class Git(VersionControl):
|
|||
#: update submodules
|
||||
self.update_submodules(dest)
|
||||
|
||||
def obtain(self, dest):
|
||||
url, rev = self.get_url_rev()
|
||||
rev_options = self.make_rev_options(rev)
|
||||
if self.check_destination(dest, url, rev_options):
|
||||
rev_display = rev_options.to_display()
|
||||
logger.info(
|
||||
'Cloning %s%s to %s', url, rev_display, display_path(dest),
|
||||
)
|
||||
self.run_command(['clone', '-q', url, dest])
|
||||
|
||||
if rev:
|
||||
rev_options = self.check_rev_options(dest, rev_options)
|
||||
# Only do a checkout if the current commit id doesn't match
|
||||
# the requested revision.
|
||||
if not self.is_commit_id_equal(dest, rev_options.rev):
|
||||
rev = rev_options.rev
|
||||
# Only fetch the revision if it's a ref
|
||||
if rev.startswith('refs/'):
|
||||
self.run_command(
|
||||
['fetch', '-q', url] + rev_options.to_args(),
|
||||
cwd=dest,
|
||||
)
|
||||
# Change the revision to the SHA of the ref we fetched
|
||||
rev = 'FETCH_HEAD'
|
||||
self.run_command(['checkout', '-q', rev], cwd=dest)
|
||||
|
||||
#: repo may contain submodules
|
||||
self.update_submodules(dest)
|
||||
|
||||
def get_url(self, location):
|
||||
"""Return URL of the first remote encountered."""
|
||||
remotes = self.run_command(
|
||||
|
@ -267,22 +265,22 @@ class Git(VersionControl):
|
|||
req += '&subdirectory=' + subdirectory
|
||||
return req
|
||||
|
||||
def get_url_rev(self):
|
||||
def get_url_rev_and_auth(self, url):
|
||||
"""
|
||||
Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
|
||||
That's required because although they use SSH they sometimes don't
|
||||
work with a ssh:// scheme (e.g. GitHub). But we need a scheme for
|
||||
parsing. Hence we remove it again afterwards and return it as a stub.
|
||||
"""
|
||||
if '://' not in self.url:
|
||||
assert 'file:' not in self.url
|
||||
self.url = self.url.replace('git+', 'git+ssh://')
|
||||
url, rev = super(Git, self).get_url_rev()
|
||||
if '://' not in url:
|
||||
assert 'file:' not in url
|
||||
url = url.replace('git+', 'git+ssh://')
|
||||
url, rev, user_pass = super(Git, self).get_url_rev_and_auth(url)
|
||||
url = url.replace('ssh://', '')
|
||||
else:
|
||||
url, rev = super(Git, self).get_url_rev()
|
||||
url, rev, user_pass = super(Git, self).get_url_rev_and_auth(url)
|
||||
|
||||
return url, rev
|
||||
return url, rev, user_pass
|
||||
|
||||
def update_submodules(self, location):
|
||||
if not os.path.exists(os.path.join(location, '.gitmodules')):
|
||||
|
|
|
@ -31,6 +31,18 @@ class Mercurial(VersionControl):
|
|||
['archive', location], show_stdout=False, cwd=temp_dir.path
|
||||
)
|
||||
|
||||
def fetch_new(self, dest, url, rev_options):
|
||||
rev_display = rev_options.to_display()
|
||||
logger.info(
|
||||
'Cloning hg %s%s to %s',
|
||||
url,
|
||||
rev_display,
|
||||
display_path(dest),
|
||||
)
|
||||
self.run_command(['clone', '--noupdate', '-q', url, dest])
|
||||
cmd_args = ['update', '-q'] + rev_options.to_args()
|
||||
self.run_command(cmd_args, cwd=dest)
|
||||
|
||||
def switch(self, dest, url, rev_options):
|
||||
repo_config = os.path.join(dest, self.dirname, 'hgrc')
|
||||
config = configparser.SafeConfigParser()
|
||||
|
@ -52,21 +64,6 @@ class Mercurial(VersionControl):
|
|||
cmd_args = ['update', '-q'] + rev_options.to_args()
|
||||
self.run_command(cmd_args, cwd=dest)
|
||||
|
||||
def obtain(self, dest):
|
||||
url, rev = self.get_url_rev()
|
||||
rev_options = self.make_rev_options(rev)
|
||||
if self.check_destination(dest, url, rev_options):
|
||||
rev_display = rev_options.to_display()
|
||||
logger.info(
|
||||
'Cloning hg %s%s to %s',
|
||||
url,
|
||||
rev_display,
|
||||
display_path(dest),
|
||||
)
|
||||
self.run_command(['clone', '--noupdate', '-q', url, dest])
|
||||
cmd_args = ['update', '-q'] + rev_options.to_args()
|
||||
self.run_command(cmd_args, cwd=dest)
|
||||
|
||||
def get_url(self, location):
|
||||
url = self.run_command(
|
||||
['showconfig', 'paths.default'],
|
||||
|
|
|
@ -4,17 +4,13 @@ import logging
|
|||
import os
|
||||
import re
|
||||
|
||||
from pip._vendor.six.moves.urllib import parse as urllib_parse
|
||||
|
||||
from pip._internal.models.link import Link
|
||||
from pip._internal.utils.logging import indent_log
|
||||
from pip._internal.utils.misc import display_path, remove_auth_from_url, rmtree
|
||||
from pip._internal.utils.misc import display_path, rmtree
|
||||
from pip._internal.vcs import VersionControl, vcs
|
||||
|
||||
_svn_xml_url_re = re.compile('url="([^"]+)"')
|
||||
_svn_rev_re = re.compile(r'committed-rev="(\d+)"')
|
||||
_svn_url_re = re.compile(r'URL: (.+)')
|
||||
_svn_revision_re = re.compile(r'Revision: (.+)')
|
||||
_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"')
|
||||
_svn_info_xml_url_re = re.compile(r'<url>(.*)</url>')
|
||||
|
||||
|
@ -31,39 +27,10 @@ class Subversion(VersionControl):
|
|||
def get_base_rev_args(self, rev):
|
||||
return ['-r', rev]
|
||||
|
||||
def get_info(self, location):
|
||||
"""Returns (url, revision), where both are strings"""
|
||||
assert not location.rstrip('/').endswith(self.dirname), \
|
||||
'Bad directory: %s' % location
|
||||
output = self.run_command(
|
||||
['info', location],
|
||||
show_stdout=False,
|
||||
extra_environ={'LANG': 'C'},
|
||||
)
|
||||
match = _svn_url_re.search(output)
|
||||
if not match:
|
||||
logger.warning(
|
||||
'Cannot determine URL of svn checkout %s',
|
||||
display_path(location),
|
||||
)
|
||||
logger.debug('Output that cannot be parsed: \n%s', output)
|
||||
return None, None
|
||||
url = match.group(1).strip()
|
||||
match = _svn_revision_re.search(output)
|
||||
if not match:
|
||||
logger.warning(
|
||||
'Cannot determine revision of svn checkout %s',
|
||||
display_path(location),
|
||||
)
|
||||
logger.debug('Output that cannot be parsed: \n%s', output)
|
||||
return url, None
|
||||
return url, match.group(1)
|
||||
|
||||
def export(self, location):
|
||||
"""Export the svn repository at the url to the destination location"""
|
||||
url, rev = self.get_url_rev()
|
||||
rev_options = get_rev_options(self, url, rev)
|
||||
url = remove_auth_from_url(url)
|
||||
url, rev_options = self.get_url_rev_options(self.url)
|
||||
|
||||
logger.info('Exporting svn repository %s to %s', url, location)
|
||||
with indent_log():
|
||||
if os.path.exists(location):
|
||||
|
@ -73,6 +40,17 @@ class Subversion(VersionControl):
|
|||
cmd_args = ['export'] + rev_options.to_args() + [url, location]
|
||||
self.run_command(cmd_args, show_stdout=False)
|
||||
|
||||
def fetch_new(self, dest, url, rev_options):
|
||||
rev_display = rev_options.to_display()
|
||||
logger.info(
|
||||
'Checking out %s%s to %s',
|
||||
url,
|
||||
rev_display,
|
||||
display_path(dest),
|
||||
)
|
||||
cmd_args = ['checkout', '-q'] + rev_options.to_args() + [url, dest]
|
||||
self.run_command(cmd_args)
|
||||
|
||||
def switch(self, dest, url, rev_options):
|
||||
cmd_args = ['switch'] + rev_options.to_args() + [url, dest]
|
||||
self.run_command(cmd_args)
|
||||
|
@ -81,21 +59,6 @@ class Subversion(VersionControl):
|
|||
cmd_args = ['update'] + rev_options.to_args() + [dest]
|
||||
self.run_command(cmd_args)
|
||||
|
||||
def obtain(self, dest):
|
||||
url, rev = self.get_url_rev()
|
||||
rev_options = get_rev_options(self, url, rev)
|
||||
url = remove_auth_from_url(url)
|
||||
if self.check_destination(dest, url, rev_options):
|
||||
rev_display = rev_options.to_display()
|
||||
logger.info(
|
||||
'Checking out %s%s to %s',
|
||||
url,
|
||||
rev_display,
|
||||
display_path(dest),
|
||||
)
|
||||
cmd_args = ['checkout', '-q'] + rev_options.to_args() + [url, dest]
|
||||
self.run_command(cmd_args)
|
||||
|
||||
def get_location(self, dist, dependency_links):
|
||||
for url in dependency_links:
|
||||
egg_fragment = Link(url).egg_fragment
|
||||
|
@ -137,12 +100,45 @@ class Subversion(VersionControl):
|
|||
revision = max(revision, localrev)
|
||||
return revision
|
||||
|
||||
def get_url_rev(self):
|
||||
def get_netloc_and_auth(self, netloc):
|
||||
"""
|
||||
Parse out and remove from the netloc the auth information.
|
||||
|
||||
This allows the auth information to be provided via the --username
|
||||
and --password options instead of via the URL.
|
||||
"""
|
||||
if '@' not in netloc:
|
||||
return netloc, (None, None)
|
||||
|
||||
# Split from the right because that's how urllib.parse.urlsplit()
|
||||
# behaves if more than one @ is present (by checking the password
|
||||
# attribute of urlsplit()'s return value).
|
||||
auth, netloc = netloc.rsplit('@', 1)
|
||||
if ':' in auth:
|
||||
# Split from the left because that's how urllib.parse.urlsplit()
|
||||
# behaves if more than one : is present (again by checking the
|
||||
# password attribute of the return value)
|
||||
user_pass = tuple(auth.split(':', 1))
|
||||
else:
|
||||
user_pass = auth, None
|
||||
|
||||
return netloc, user_pass
|
||||
|
||||
def get_url_rev_and_auth(self, url):
|
||||
# hotfix the URL scheme after removing svn+ from svn+ssh:// readd it
|
||||
url, rev = super(Subversion, self).get_url_rev()
|
||||
url, rev, user_pass = super(Subversion, self).get_url_rev_and_auth(url)
|
||||
if url.startswith('ssh://'):
|
||||
url = 'svn+' + url
|
||||
return url, rev
|
||||
return url, rev, user_pass
|
||||
|
||||
def make_rev_args(self, username, password):
|
||||
extra_args = []
|
||||
if username:
|
||||
extra_args += ['--username', username]
|
||||
if password:
|
||||
extra_args += ['--password', password]
|
||||
|
||||
return extra_args
|
||||
|
||||
def get_url(self, location):
|
||||
# In cases where the source is in a subdirectory, not alongside
|
||||
|
@ -222,32 +218,4 @@ class Subversion(VersionControl):
|
|||
return False
|
||||
|
||||
|
||||
def get_rev_options(vcs, url, rev):
|
||||
"""
|
||||
Return a RevOptions object.
|
||||
"""
|
||||
r = urllib_parse.urlsplit(url)
|
||||
if hasattr(r, 'username'):
|
||||
# >= Python-2.5
|
||||
username, password = r.username, r.password
|
||||
else:
|
||||
netloc = r[1]
|
||||
if '@' in netloc:
|
||||
auth = netloc.split('@')[0]
|
||||
if ':' in auth:
|
||||
username, password = auth.split(':', 1)
|
||||
else:
|
||||
username, password = auth, None
|
||||
else:
|
||||
username, password = None, None
|
||||
|
||||
extra_args = []
|
||||
if username:
|
||||
extra_args += ['--username', username]
|
||||
if password:
|
||||
extra_args += ['--password', password]
|
||||
|
||||
return vcs.make_rev_options(rev, extra_args=extra_args)
|
||||
|
||||
|
||||
vcs.register(Subversion)
|
||||
|
|
|
@ -163,7 +163,7 @@ def message_about_scripts_not_on_PATH(scripts):
|
|||
# We don't want to warn for directories that are on PATH.
|
||||
not_warn_dirs = [
|
||||
os.path.normcase(i).rstrip(os.sep) for i in
|
||||
os.environ["PATH"].split(os.pathsep)
|
||||
os.environ.get("PATH", "").split(os.pathsep)
|
||||
]
|
||||
# If an executable sits with sys.executable, we don't warn for it.
|
||||
# This covers the case of venv invocations without activating the venv.
|
||||
|
@ -506,8 +506,8 @@ if __name__ == '__main__':
|
|||
row[1], row[2] = rehash(row[0])
|
||||
writer.writerow(row)
|
||||
for f in generated:
|
||||
h, l = rehash(f)
|
||||
writer.writerow((normpath(f, lib_dir), h, l))
|
||||
digest, length = rehash(f)
|
||||
writer.writerow((normpath(f, lib_dir), digest, length))
|
||||
for f in installed:
|
||||
writer.writerow((installed[f], '', ''))
|
||||
shutil.move(temp_record, record)
|
||||
|
@ -528,7 +528,7 @@ def wheel_version(source_dir):
|
|||
version = wheel_data['Wheel-Version'].strip()
|
||||
version = tuple(map(int, version.split('.')))
|
||||
return version
|
||||
except:
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
|
@ -653,7 +653,7 @@ class WheelBuilder(object):
|
|||
)
|
||||
logger.info('Stored in directory: %s', output_dir)
|
||||
return wheel_path
|
||||
except:
|
||||
except Exception:
|
||||
pass
|
||||
# Ignore return, we can't do anything else useful.
|
||||
self._clean_one(req)
|
||||
|
@ -685,7 +685,7 @@ class WheelBuilder(object):
|
|||
call_subprocess(wheel_args, cwd=req.setup_py_dir,
|
||||
show_stdout=False, spinner=spinner)
|
||||
return True
|
||||
except:
|
||||
except Exception:
|
||||
spinner.finish("error")
|
||||
logger.error('Failed building wheel for %s', req.name)
|
||||
return False
|
||||
|
@ -698,7 +698,7 @@ class WheelBuilder(object):
|
|||
try:
|
||||
call_subprocess(clean_args, cwd=req.source_dir, show_stdout=False)
|
||||
return True
|
||||
except:
|
||||
except Exception:
|
||||
logger.error('Failed cleaning build dir for %s', req.name)
|
||||
return False
|
||||
|
||||
|
|
|
@ -22,9 +22,9 @@ Policy
|
|||
Rationale
|
||||
---------
|
||||
|
||||
Historically pip has not had any dependencies except for setuptools itself,
|
||||
Historically pip has not had any dependencies except for ``setuptools`` itself,
|
||||
choosing instead to implement any functionality it needed to prevent needing
|
||||
a dependency. However, starting with pip 1.5 we began to replace code that was
|
||||
a dependency. However, starting with pip 1.5, we began to replace code that was
|
||||
implemented inside of pip with reusable libraries from PyPI. This brought the
|
||||
typical benefits of reusing libraries instead of reinventing the wheel like
|
||||
higher quality and more battle tested code, centralization of bug fixes
|
||||
|
@ -43,7 +43,7 @@ way (via ``install_requires``) for pip. These issues are:
|
|||
|
||||
* **Making other libraries uninstallable.** One of pip's current dependencies is
|
||||
the ``requests`` library, for which pip requires a fairly recent version to run.
|
||||
If pip dependended on ``requests`` in the traditional manner, then we'd either
|
||||
If pip depended on ``requests`` in the traditional manner, then we'd either
|
||||
have to maintain compatibility with every ``requests`` version that has ever
|
||||
existed (and ever will), OR allow pip to render certain versions of ``requests``
|
||||
uninstallable. (The second issue, although technically true for any Python
|
||||
|
@ -117,7 +117,7 @@ Debundling
|
|||
As mentioned in the rationale, we, the pip team, would prefer it if pip was not
|
||||
debundled (other than optionally ``pip/_vendor/requests/cacert.pem``) and that
|
||||
pip was left intact. However, if you insist on doing so, we have a
|
||||
semi-supported method that we do test in our CI, but requires a bit of
|
||||
semi-supported method (that we don't test in our CI) and requires a bit of
|
||||
extra work on your end in order to solve the problems described above.
|
||||
|
||||
1. Delete everything in ``pip/_vendor/`` **except** for
|
||||
|
@ -131,6 +131,14 @@ extra work on your end in order to solve the problems described above.
|
|||
3. Modify ``pip/_vendor/__init__.py`` so that the ``DEBUNDLED`` variable is
|
||||
``True``.
|
||||
|
||||
4. *(Optional)* If you've placed the wheels in a location other than
|
||||
4. Upon installation, the ``INSTALLER`` file in pip's own ``dist-info``
|
||||
directory should be set to something other than ``pip``, so that pip
|
||||
can detect that it wasn't installed using itself.
|
||||
|
||||
5. *(optional)* If you've placed the wheels in a location other than
|
||||
``pip/_vendor/``, then modify ``pip/_vendor/__init__.py`` so that the
|
||||
``WHEEL_DIR`` variable points to the location you've placed them.
|
||||
|
||||
6. *(optional)* Update the ``pip_version_check`` logic to use the
|
||||
appropriate logic for determining the latest available version of pip and
|
||||
prompt the user with the correct upgrade message.
|
||||
|
|
|
@ -107,3 +107,4 @@ if DEBUNDLED:
|
|||
vendored("requests.packages.urllib3.util.ssl_")
|
||||
vendored("requests.packages.urllib3.util.timeout")
|
||||
vendored("requests.packages.urllib3.util.url")
|
||||
vendored("urllib3")
|
||||
|
|
|
@ -2,9 +2,9 @@
|
|||
|
||||
Make it easy to import from cachecontrol without long namespaces.
|
||||
"""
|
||||
__author__ = 'Eric Larson'
|
||||
__email__ = 'eric@ionrock.org'
|
||||
__version__ = '0.12.4'
|
||||
__author__ = "Eric Larson"
|
||||
__email__ = "eric@ionrock.org"
|
||||
__version__ = "0.12.5"
|
||||
|
||||
from .wrapper import CacheControl
|
||||
from .adapter import CacheControlAdapter
|
||||
|
|
|
@ -17,14 +17,11 @@ def setup_logging():
|
|||
|
||||
def get_session():
|
||||
adapter = CacheControlAdapter(
|
||||
DictCache(),
|
||||
cache_etags=True,
|
||||
serializer=None,
|
||||
heuristic=None,
|
||||
DictCache(), cache_etags=True, serializer=None, heuristic=None
|
||||
)
|
||||
sess = requests.Session()
|
||||
sess.mount('http://', adapter)
|
||||
sess.mount('https://', adapter)
|
||||
sess.mount("http://", adapter)
|
||||
sess.mount("https://", adapter)
|
||||
|
||||
sess.cache_controller = adapter.controller
|
||||
return sess
|
||||
|
@ -32,7 +29,7 @@ def get_session():
|
|||
|
||||
def get_args():
|
||||
parser = ArgumentParser()
|
||||
parser.add_argument('url', help='The URL to try and cache')
|
||||
parser.add_argument("url", help="The URL to try and cache")
|
||||
return parser.parse_args()
|
||||
|
||||
|
||||
|
@ -51,10 +48,10 @@ def main(args=None):
|
|||
|
||||
# Now try to get it
|
||||
if sess.cache_controller.cached_request(resp.request):
|
||||
print('Cached!')
|
||||
print("Cached!")
|
||||
else:
|
||||
print('Not cached :(')
|
||||
print("Not cached :(")
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
|
|
@ -10,25 +10,27 @@ from .filewrapper import CallbackFileWrapper
|
|||
|
||||
|
||||
class CacheControlAdapter(HTTPAdapter):
|
||||
invalidating_methods = set(['PUT', 'DELETE'])
|
||||
invalidating_methods = {"PUT", "DELETE"}
|
||||
|
||||
def __init__(self, cache=None,
|
||||
cache_etags=True,
|
||||
controller_class=None,
|
||||
serializer=None,
|
||||
heuristic=None,
|
||||
cacheable_methods=None,
|
||||
*args, **kw):
|
||||
def __init__(
|
||||
self,
|
||||
cache=None,
|
||||
cache_etags=True,
|
||||
controller_class=None,
|
||||
serializer=None,
|
||||
heuristic=None,
|
||||
cacheable_methods=None,
|
||||
*args,
|
||||
**kw
|
||||
):
|
||||
super(CacheControlAdapter, self).__init__(*args, **kw)
|
||||
self.cache = cache or DictCache()
|
||||
self.heuristic = heuristic
|
||||
self.cacheable_methods = cacheable_methods or ('GET',)
|
||||
self.cacheable_methods = cacheable_methods or ("GET",)
|
||||
|
||||
controller_factory = controller_class or CacheController
|
||||
self.controller = controller_factory(
|
||||
self.cache,
|
||||
cache_etags=cache_etags,
|
||||
serializer=serializer,
|
||||
self.cache, cache_etags=cache_etags, serializer=serializer
|
||||
)
|
||||
|
||||
def send(self, request, cacheable_methods=None, **kw):
|
||||
|
@ -43,20 +45,18 @@ class CacheControlAdapter(HTTPAdapter):
|
|||
except zlib.error:
|
||||
cached_response = None
|
||||
if cached_response:
|
||||
return self.build_response(request, cached_response,
|
||||
from_cache=True)
|
||||
return self.build_response(request, cached_response, from_cache=True)
|
||||
|
||||
# check for etags and add headers if appropriate
|
||||
request.headers.update(
|
||||
self.controller.conditional_headers(request)
|
||||
)
|
||||
request.headers.update(self.controller.conditional_headers(request))
|
||||
|
||||
resp = super(CacheControlAdapter, self).send(request, **kw)
|
||||
|
||||
return resp
|
||||
|
||||
def build_response(self, request, response, from_cache=False,
|
||||
cacheable_methods=None):
|
||||
def build_response(
|
||||
self, request, response, from_cache=False, cacheable_methods=None
|
||||
):
|
||||
"""
|
||||
Build a response by making a request or using the cache.
|
||||
|
||||
|
@ -101,10 +101,8 @@ class CacheControlAdapter(HTTPAdapter):
|
|||
response._fp = CallbackFileWrapper(
|
||||
response._fp,
|
||||
functools.partial(
|
||||
self.controller.cache_response,
|
||||
request,
|
||||
response,
|
||||
)
|
||||
self.controller.cache_response, request, response
|
||||
),
|
||||
)
|
||||
if response.chunked:
|
||||
super_update_chunk_length = response._update_chunk_length
|
||||
|
@ -113,11 +111,12 @@ class CacheControlAdapter(HTTPAdapter):
|
|||
super_update_chunk_length()
|
||||
if self.chunk_left == 0:
|
||||
self._fp._close()
|
||||
response._update_chunk_length = types.MethodType(_update_chunk_length, response)
|
||||
|
||||
resp = super(CacheControlAdapter, self).build_response(
|
||||
request, response
|
||||
)
|
||||
response._update_chunk_length = types.MethodType(
|
||||
_update_chunk_length, response
|
||||
)
|
||||
|
||||
resp = super(CacheControlAdapter, self).build_response(request, response)
|
||||
|
||||
# See if we should invalidate the cache.
|
||||
if request.method in self.invalidating_methods and resp.ok:
|
||||
|
|
|
@ -8,13 +8,13 @@ from threading import Lock
|
|||
class BaseCache(object):
|
||||
|
||||
def get(self, key):
|
||||
raise NotImplemented()
|
||||
raise NotImplementedError()
|
||||
|
||||
def set(self, key, value):
|
||||
raise NotImplemented()
|
||||
raise NotImplementedError()
|
||||
|
||||
def delete(self, key):
|
||||
raise NotImplemented()
|
||||
raise NotImplementedError()
|
||||
|
||||
def close(self):
|
||||
pass
|
||||
|
|
|
@ -9,7 +9,7 @@ try:
|
|||
FileNotFoundError
|
||||
except NameError:
|
||||
# py2.X
|
||||
FileNotFoundError = OSError
|
||||
FileNotFoundError = (IOError, OSError)
|
||||
|
||||
|
||||
def _secure_open_write(filename, fmode):
|
||||
|
@ -46,6 +46,7 @@ def _secure_open_write(filename, fmode):
|
|||
fd = os.open(filename, flags, fmode)
|
||||
try:
|
||||
return os.fdopen(fd, "wb")
|
||||
|
||||
except:
|
||||
# An error occurred wrapping our FD in a file object
|
||||
os.close(fd)
|
||||
|
@ -53,8 +54,16 @@ def _secure_open_write(filename, fmode):
|
|||
|
||||
|
||||
class FileCache(BaseCache):
|
||||
def __init__(self, directory, forever=False, filemode=0o0600,
|
||||
dirmode=0o0700, use_dir_lock=None, lock_class=None):
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
directory,
|
||||
forever=False,
|
||||
filemode=0o0600,
|
||||
dirmode=0o0700,
|
||||
use_dir_lock=None,
|
||||
lock_class=None,
|
||||
):
|
||||
|
||||
if use_dir_lock is not None and lock_class is not None:
|
||||
raise ValueError("Cannot use use_dir_lock and lock_class together")
|
||||
|
@ -63,12 +72,15 @@ class FileCache(BaseCache):
|
|||
from pip._vendor.lockfile import LockFile
|
||||
from pip._vendor.lockfile.mkdirlockfile import MkdirLockFile
|
||||
except ImportError:
|
||||
notice = dedent("""
|
||||
notice = dedent(
|
||||
"""
|
||||
NOTE: In order to use the FileCache you must have
|
||||
lockfile installed. You can install it via pip:
|
||||
pip install lockfile
|
||||
""")
|
||||
"""
|
||||
)
|
||||
raise ImportError(notice)
|
||||
|
||||
else:
|
||||
if use_dir_lock:
|
||||
lock_class = MkdirLockFile
|
||||
|
@ -95,11 +107,12 @@ class FileCache(BaseCache):
|
|||
|
||||
def get(self, key):
|
||||
name = self._fn(key)
|
||||
if not os.path.exists(name):
|
||||
return None
|
||||
try:
|
||||
with open(name, "rb") as fh:
|
||||
return fh.read()
|
||||
|
||||
with open(name, 'rb') as fh:
|
||||
return fh.read()
|
||||
except FileNotFoundError:
|
||||
return None
|
||||
|
||||
def set(self, key, value):
|
||||
name = self._fn(key)
|
||||
|
|
|
@ -4,16 +4,6 @@ from datetime import datetime
|
|||
from pip._vendor.cachecontrol.cache import BaseCache
|
||||
|
||||
|
||||
def total_seconds(td):
|
||||
"""Python 2.6 compatability"""
|
||||
if hasattr(td, 'total_seconds'):
|
||||
return int(td.total_seconds())
|
||||
|
||||
ms = td.microseconds
|
||||
secs = (td.seconds + td.days * 24 * 3600)
|
||||
return int((ms + secs * 10**6) / 10**6)
|
||||
|
||||
|
||||
class RedisCache(BaseCache):
|
||||
|
||||
def __init__(self, conn):
|
||||
|
@ -27,7 +17,7 @@ class RedisCache(BaseCache):
|
|||
self.conn.set(key, value)
|
||||
else:
|
||||
expires = expires - datetime.utcnow()
|
||||
self.conn.setex(key, total_seconds(expires), value)
|
||||
self.conn.setex(key, int(expires.total_seconds()), value)
|
||||
|
||||
def delete(self, key):
|
||||
self.conn.delete(key)
|
||||
|
|
|
@ -30,8 +30,10 @@ def parse_uri(uri):
|
|||
class CacheController(object):
|
||||
"""An interface to see if request should cached or not.
|
||||
"""
|
||||
def __init__(self, cache=None, cache_etags=True, serializer=None,
|
||||
status_codes=None):
|
||||
|
||||
def __init__(
|
||||
self, cache=None, cache_etags=True, serializer=None, status_codes=None
|
||||
):
|
||||
self.cache = cache or DictCache()
|
||||
self.cache_etags = cache_etags
|
||||
self.serializer = serializer or Serializer()
|
||||
|
@ -64,34 +66,35 @@ class CacheController(object):
|
|||
def parse_cache_control(self, headers):
|
||||
known_directives = {
|
||||
# https://tools.ietf.org/html/rfc7234#section-5.2
|
||||
'max-age': (int, True,),
|
||||
'max-stale': (int, False,),
|
||||
'min-fresh': (int, True,),
|
||||
'no-cache': (None, False,),
|
||||
'no-store': (None, False,),
|
||||
'no-transform': (None, False,),
|
||||
'only-if-cached' : (None, False,),
|
||||
'must-revalidate': (None, False,),
|
||||
'public': (None, False,),
|
||||
'private': (None, False,),
|
||||
'proxy-revalidate': (None, False,),
|
||||
's-maxage': (int, True,)
|
||||
"max-age": (int, True),
|
||||
"max-stale": (int, False),
|
||||
"min-fresh": (int, True),
|
||||
"no-cache": (None, False),
|
||||
"no-store": (None, False),
|
||||
"no-transform": (None, False),
|
||||
"only-if-cached": (None, False),
|
||||
"must-revalidate": (None, False),
|
||||
"public": (None, False),
|
||||
"private": (None, False),
|
||||
"proxy-revalidate": (None, False),
|
||||
"s-maxage": (int, True),
|
||||
}
|
||||
|
||||
cc_headers = headers.get('cache-control',
|
||||
headers.get('Cache-Control', ''))
|
||||
cc_headers = headers.get("cache-control", headers.get("Cache-Control", ""))
|
||||
|
||||
retval = {}
|
||||
|
||||
for cc_directive in cc_headers.split(','):
|
||||
parts = cc_directive.split('=', 1)
|
||||
for cc_directive in cc_headers.split(","):
|
||||
if not cc_directive.strip():
|
||||
continue
|
||||
|
||||
parts = cc_directive.split("=", 1)
|
||||
directive = parts[0].strip()
|
||||
|
||||
try:
|
||||
typ, required = known_directives[directive]
|
||||
except KeyError:
|
||||
logger.debug('Ignoring unknown cache-control directive: %s',
|
||||
directive)
|
||||
logger.debug("Ignoring unknown cache-control directive: %s", directive)
|
||||
continue
|
||||
|
||||
if not typ or not required:
|
||||
|
@ -101,11 +104,16 @@ class CacheController(object):
|
|||
retval[directive] = typ(parts[1].strip())
|
||||
except IndexError:
|
||||
if required:
|
||||
logger.debug('Missing value for cache-control '
|
||||
'directive: %s', directive)
|
||||
logger.debug(
|
||||
"Missing value for cache-control " "directive: %s",
|
||||
directive,
|
||||
)
|
||||
except ValueError:
|
||||
logger.debug('Invalid value for cache-control directive '
|
||||
'%s, must be %s', directive, typ.__name__)
|
||||
logger.debug(
|
||||
"Invalid value for cache-control directive " "%s, must be %s",
|
||||
directive,
|
||||
typ.__name__,
|
||||
)
|
||||
|
||||
return retval
|
||||
|
||||
|
@ -119,24 +127,24 @@ class CacheController(object):
|
|||
cc = self.parse_cache_control(request.headers)
|
||||
|
||||
# Bail out if the request insists on fresh data
|
||||
if 'no-cache' in cc:
|
||||
if "no-cache" in cc:
|
||||
logger.debug('Request header has "no-cache", cache bypassed')
|
||||
return False
|
||||
|
||||
if 'max-age' in cc and cc['max-age'] == 0:
|
||||
if "max-age" in cc and cc["max-age"] == 0:
|
||||
logger.debug('Request header has "max_age" as 0, cache bypassed')
|
||||
return False
|
||||
|
||||
# Request allows serving from the cache, let's see if we find something
|
||||
cache_data = self.cache.get(cache_url)
|
||||
if cache_data is None:
|
||||
logger.debug('No cache entry available')
|
||||
logger.debug("No cache entry available")
|
||||
return False
|
||||
|
||||
# Check whether it can be deserialized
|
||||
resp = self.serializer.loads(request, cache_data)
|
||||
if not resp:
|
||||
logger.warning('Cache entry deserialization failed, entry ignored')
|
||||
logger.warning("Cache entry deserialization failed, entry ignored")
|
||||
return False
|
||||
|
||||
# If we have a cached 301, return it immediately. We don't
|
||||
|
@ -148,27 +156,27 @@ class CacheController(object):
|
|||
# Client can try to refresh the value by repeating the request
|
||||
# with cache busting headers as usual (ie no-cache).
|
||||
if resp.status == 301:
|
||||
msg = ('Returning cached "301 Moved Permanently" response '
|
||||
'(ignoring date and etag information)')
|
||||
msg = (
|
||||
'Returning cached "301 Moved Permanently" response '
|
||||
"(ignoring date and etag information)"
|
||||
)
|
||||
logger.debug(msg)
|
||||
return resp
|
||||
|
||||
headers = CaseInsensitiveDict(resp.headers)
|
||||
if not headers or 'date' not in headers:
|
||||
if 'etag' not in headers:
|
||||
if not headers or "date" not in headers:
|
||||
if "etag" not in headers:
|
||||
# Without date or etag, the cached response can never be used
|
||||
# and should be deleted.
|
||||
logger.debug('Purging cached response: no date or etag')
|
||||
logger.debug("Purging cached response: no date or etag")
|
||||
self.cache.delete(cache_url)
|
||||
logger.debug('Ignoring cached response: no date')
|
||||
logger.debug("Ignoring cached response: no date")
|
||||
return False
|
||||
|
||||
now = time.time()
|
||||
date = calendar.timegm(
|
||||
parsedate_tz(headers['date'])
|
||||
)
|
||||
date = calendar.timegm(parsedate_tz(headers["date"]))
|
||||
current_age = max(0, now - date)
|
||||
logger.debug('Current age based on date: %i', current_age)
|
||||
logger.debug("Current age based on date: %i", current_age)
|
||||
|
||||
# TODO: There is an assumption that the result will be a
|
||||
# urllib3 response object. This may not be best since we
|
||||
|
@ -180,45 +188,41 @@ class CacheController(object):
|
|||
freshness_lifetime = 0
|
||||
|
||||
# Check the max-age pragma in the cache control header
|
||||
if 'max-age' in resp_cc:
|
||||
freshness_lifetime = resp_cc['max-age']
|
||||
logger.debug('Freshness lifetime from max-age: %i',
|
||||
freshness_lifetime)
|
||||
if "max-age" in resp_cc:
|
||||
freshness_lifetime = resp_cc["max-age"]
|
||||
logger.debug("Freshness lifetime from max-age: %i", freshness_lifetime)
|
||||
|
||||
# If there isn't a max-age, check for an expires header
|
||||
elif 'expires' in headers:
|
||||
expires = parsedate_tz(headers['expires'])
|
||||
elif "expires" in headers:
|
||||
expires = parsedate_tz(headers["expires"])
|
||||
if expires is not None:
|
||||
expire_time = calendar.timegm(expires) - date
|
||||
freshness_lifetime = max(0, expire_time)
|
||||
logger.debug("Freshness lifetime from expires: %i",
|
||||
freshness_lifetime)
|
||||
logger.debug("Freshness lifetime from expires: %i", freshness_lifetime)
|
||||
|
||||
# Determine if we are setting freshness limit in the
|
||||
# request. Note, this overrides what was in the response.
|
||||
if 'max-age' in cc:
|
||||
freshness_lifetime = cc['max-age']
|
||||
logger.debug('Freshness lifetime from request max-age: %i',
|
||||
freshness_lifetime)
|
||||
if "max-age" in cc:
|
||||
freshness_lifetime = cc["max-age"]
|
||||
logger.debug(
|
||||
"Freshness lifetime from request max-age: %i", freshness_lifetime
|
||||
)
|
||||
|
||||
if 'min-fresh' in cc:
|
||||
min_fresh = cc['min-fresh']
|
||||
if "min-fresh" in cc:
|
||||
min_fresh = cc["min-fresh"]
|
||||
# adjust our current age by our min fresh
|
||||
current_age += min_fresh
|
||||
logger.debug('Adjusted current age from min-fresh: %i',
|
||||
current_age)
|
||||
logger.debug("Adjusted current age from min-fresh: %i", current_age)
|
||||
|
||||
# Return entry if it is fresh enough
|
||||
if freshness_lifetime > current_age:
|
||||
logger.debug('The response is "fresh", returning cached response')
|
||||
logger.debug('%i > %i', freshness_lifetime, current_age)
|
||||
logger.debug("%i > %i", freshness_lifetime, current_age)
|
||||
return resp
|
||||
|
||||
# we're not fresh. If we don't have an Etag, clear it out
|
||||
if 'etag' not in headers:
|
||||
logger.debug(
|
||||
'The cached response is "stale" with no etag, purging'
|
||||
)
|
||||
if "etag" not in headers:
|
||||
logger.debug('The cached response is "stale" with no etag, purging')
|
||||
self.cache.delete(cache_url)
|
||||
|
||||
# return the original handler
|
||||
|
@ -232,16 +236,15 @@ class CacheController(object):
|
|||
if resp:
|
||||
headers = CaseInsensitiveDict(resp.headers)
|
||||
|
||||
if 'etag' in headers:
|
||||
new_headers['If-None-Match'] = headers['ETag']
|
||||
if "etag" in headers:
|
||||
new_headers["If-None-Match"] = headers["ETag"]
|
||||
|
||||
if 'last-modified' in headers:
|
||||
new_headers['If-Modified-Since'] = headers['Last-Modified']
|
||||
if "last-modified" in headers:
|
||||
new_headers["If-Modified-Since"] = headers["Last-Modified"]
|
||||
|
||||
return new_headers
|
||||
|
||||
def cache_response(self, request, response, body=None,
|
||||
status_codes=None):
|
||||
def cache_response(self, request, response, body=None, status_codes=None):
|
||||
"""
|
||||
Algorithm for caching requests.
|
||||
|
||||
|
@ -252,9 +255,7 @@ class CacheController(object):
|
|||
cacheable_status_codes = status_codes or self.cacheable_status_codes
|
||||
if response.status not in cacheable_status_codes:
|
||||
logger.debug(
|
||||
'Status code %s not in %s',
|
||||
response.status,
|
||||
cacheable_status_codes
|
||||
"Status code %s not in %s", response.status, cacheable_status_codes
|
||||
)
|
||||
return
|
||||
|
||||
|
@ -264,10 +265,12 @@ class CacheController(object):
|
|||
# Content-Length is valid then we can check to see if the body we've
|
||||
# been given matches the expected size, and if it doesn't we'll just
|
||||
# skip trying to cache it.
|
||||
if (body is not None and
|
||||
"content-length" in response_headers and
|
||||
response_headers["content-length"].isdigit() and
|
||||
int(response_headers["content-length"]) != len(body)):
|
||||
if (
|
||||
body is not None
|
||||
and "content-length" in response_headers
|
||||
and response_headers["content-length"].isdigit()
|
||||
and int(response_headers["content-length"]) != len(body)
|
||||
):
|
||||
return
|
||||
|
||||
cc_req = self.parse_cache_control(request.headers)
|
||||
|
@ -278,53 +281,49 @@ class CacheController(object):
|
|||
|
||||
# Delete it from the cache if we happen to have it stored there
|
||||
no_store = False
|
||||
if 'no-store' in cc:
|
||||
if "no-store" in cc:
|
||||
no_store = True
|
||||
logger.debug('Response header has "no-store"')
|
||||
if 'no-store' in cc_req:
|
||||
if "no-store" in cc_req:
|
||||
no_store = True
|
||||
logger.debug('Request header has "no-store"')
|
||||
if no_store and self.cache.get(cache_url):
|
||||
logger.debug('Purging existing cache entry to honor "no-store"')
|
||||
self.cache.delete(cache_url)
|
||||
if no_store:
|
||||
return
|
||||
|
||||
# If we've been given an etag, then keep the response
|
||||
if self.cache_etags and 'etag' in response_headers:
|
||||
logger.debug('Caching due to etag')
|
||||
if self.cache_etags and "etag" in response_headers:
|
||||
logger.debug("Caching due to etag")
|
||||
self.cache.set(
|
||||
cache_url,
|
||||
self.serializer.dumps(request, response, body=body),
|
||||
cache_url, self.serializer.dumps(request, response, body=body)
|
||||
)
|
||||
|
||||
# Add to the cache any 301s. We do this before looking that
|
||||
# the Date headers.
|
||||
elif response.status == 301:
|
||||
logger.debug('Caching permanant redirect')
|
||||
self.cache.set(
|
||||
cache_url,
|
||||
self.serializer.dumps(request, response)
|
||||
)
|
||||
logger.debug("Caching permanant redirect")
|
||||
self.cache.set(cache_url, self.serializer.dumps(request, response))
|
||||
|
||||
# Add to the cache if the response headers demand it. If there
|
||||
# is no date header then we can't do anything about expiring
|
||||
# the cache.
|
||||
elif 'date' in response_headers:
|
||||
elif "date" in response_headers:
|
||||
# cache when there is a max-age > 0
|
||||
if 'max-age' in cc and cc['max-age'] > 0:
|
||||
logger.debug('Caching b/c date exists and max-age > 0')
|
||||
if "max-age" in cc and cc["max-age"] > 0:
|
||||
logger.debug("Caching b/c date exists and max-age > 0")
|
||||
self.cache.set(
|
||||
cache_url,
|
||||
self.serializer.dumps(request, response, body=body),
|
||||
cache_url, self.serializer.dumps(request, response, body=body)
|
||||
)
|
||||
|
||||
# If the request can expire, it means we should cache it
|
||||
# in the meantime.
|
||||
elif 'expires' in response_headers:
|
||||
if response_headers['expires']:
|
||||
logger.debug('Caching b/c of expires header')
|
||||
elif "expires" in response_headers:
|
||||
if response_headers["expires"]:
|
||||
logger.debug("Caching b/c of expires header")
|
||||
self.cache.set(
|
||||
cache_url,
|
||||
self.serializer.dumps(request, response, body=body),
|
||||
cache_url, self.serializer.dumps(request, response, body=body)
|
||||
)
|
||||
|
||||
def update_cached_response(self, request, response):
|
||||
|
@ -336,10 +335,7 @@ class CacheController(object):
|
|||
"""
|
||||
cache_url = self.cache_url(request.url)
|
||||
|
||||
cached_response = self.serializer.loads(
|
||||
request,
|
||||
self.cache.get(cache_url)
|
||||
)
|
||||
cached_response = self.serializer.loads(request, self.cache.get(cache_url))
|
||||
|
||||
if not cached_response:
|
||||
# we didn't have a cached response
|
||||
|
@ -352,22 +348,20 @@ class CacheController(object):
|
|||
# the cached body invalid. But... just in case, we'll be sure
|
||||
# to strip out ones we know that might be problmatic due to
|
||||
# typical assumptions.
|
||||
excluded_headers = [
|
||||
"content-length",
|
||||
]
|
||||
excluded_headers = ["content-length"]
|
||||
|
||||
cached_response.headers.update(
|
||||
dict((k, v) for k, v in response.headers.items()
|
||||
if k.lower() not in excluded_headers)
|
||||
dict(
|
||||
(k, v)
|
||||
for k, v in response.headers.items()
|
||||
if k.lower() not in excluded_headers
|
||||
)
|
||||
)
|
||||
|
||||
# we want a 200 b/c we have content via the cache
|
||||
cached_response.status = 200
|
||||
|
||||
# update our cache
|
||||
self.cache.set(
|
||||
cache_url,
|
||||
self.serializer.dumps(request, cached_response),
|
||||
)
|
||||
self.cache.set(cache_url, self.serializer.dumps(request, cached_response))
|
||||
|
||||
return cached_response
|
||||
|
|
|
@ -27,17 +27,19 @@ class CallbackFileWrapper(object):
|
|||
# self.__fp hasn't been set.
|
||||
#
|
||||
# [0] https://docs.python.org/2/reference/expressions.html#atom-identifiers
|
||||
fp = self.__getattribute__('_CallbackFileWrapper__fp')
|
||||
fp = self.__getattribute__("_CallbackFileWrapper__fp")
|
||||
return getattr(fp, name)
|
||||
|
||||
def __is_fp_closed(self):
|
||||
try:
|
||||
return self.__fp.fp is None
|
||||
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
try:
|
||||
return self.__fp.closed
|
||||
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
|
@ -66,7 +68,7 @@ class CallbackFileWrapper(object):
|
|||
|
||||
def _safe_read(self, amt):
|
||||
data = self.__fp._safe_read(amt)
|
||||
if amt == 2 and data == b'\r\n':
|
||||
if amt == 2 and data == b"\r\n":
|
||||
# urllib executes this read to toss the CRLF at the end
|
||||
# of the chunk.
|
||||
return data
|
||||
|
|
|
@ -46,7 +46,7 @@ class BaseHeuristic(object):
|
|||
response.headers.update(updated_headers)
|
||||
warning_header_value = self.warning(response)
|
||||
if warning_header_value is not None:
|
||||
response.headers.update({'Warning': warning_header_value})
|
||||
response.headers.update({"Warning": warning_header_value})
|
||||
|
||||
return response
|
||||
|
||||
|
@ -56,15 +56,15 @@ class OneDayCache(BaseHeuristic):
|
|||
Cache the response by providing an expires 1 day in the
|
||||
future.
|
||||
"""
|
||||
|
||||
def update_headers(self, response):
|
||||
headers = {}
|
||||
|
||||
if 'expires' not in response.headers:
|
||||
date = parsedate(response.headers['date'])
|
||||
expires = expire_after(timedelta(days=1),
|
||||
date=datetime(*date[:6]))
|
||||
headers['expires'] = datetime_to_header(expires)
|
||||
headers['cache-control'] = 'public'
|
||||
if "expires" not in response.headers:
|
||||
date = parsedate(response.headers["date"])
|
||||
expires = expire_after(timedelta(days=1), date=datetime(*date[:6]))
|
||||
headers["expires"] = datetime_to_header(expires)
|
||||
headers["cache-control"] = "public"
|
||||
return headers
|
||||
|
||||
|
||||
|
@ -78,13 +78,10 @@ class ExpiresAfter(BaseHeuristic):
|
|||
|
||||
def update_headers(self, response):
|
||||
expires = expire_after(self.delta)
|
||||
return {
|
||||
'expires': datetime_to_header(expires),
|
||||
'cache-control': 'public',
|
||||
}
|
||||
return {"expires": datetime_to_header(expires), "cache-control": "public"}
|
||||
|
||||
def warning(self, response):
|
||||
tmpl = '110 - Automatically cached for %s. Response might be stale'
|
||||
tmpl = "110 - Automatically cached for %s. Response might be stale"
|
||||
return tmpl % self.delta
|
||||
|
||||
|
||||
|
@ -100,27 +97,27 @@ class LastModified(BaseHeuristic):
|
|||
http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397
|
||||
Unlike mozilla we limit this to 24-hr.
|
||||
"""
|
||||
cacheable_by_default_statuses = set([
|
||||
cacheable_by_default_statuses = {
|
||||
200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501
|
||||
])
|
||||
}
|
||||
|
||||
def update_headers(self, resp):
|
||||
headers = resp.headers
|
||||
|
||||
if 'expires' in headers:
|
||||
if "expires" in headers:
|
||||
return {}
|
||||
|
||||
if 'cache-control' in headers and headers['cache-control'] != 'public':
|
||||
if "cache-control" in headers and headers["cache-control"] != "public":
|
||||
return {}
|
||||
|
||||
if resp.status not in self.cacheable_by_default_statuses:
|
||||
return {}
|
||||
|
||||
if 'date' not in headers or 'last-modified' not in headers:
|
||||
if "date" not in headers or "last-modified" not in headers:
|
||||
return {}
|
||||
|
||||
date = calendar.timegm(parsedate_tz(headers['date']))
|
||||
last_modified = parsedate(headers['last-modified'])
|
||||
date = calendar.timegm(parsedate_tz(headers["date"]))
|
||||
last_modified = parsedate(headers["last-modified"])
|
||||
if date is None or last_modified is None:
|
||||
return {}
|
||||
|
||||
|
@ -132,7 +129,7 @@ class LastModified(BaseHeuristic):
|
|||
return {}
|
||||
|
||||
expires = date + freshness_lifetime
|
||||
return {'expires': time.strftime(TIME_FMT, time.gmtime(expires))}
|
||||
return {"expires": time.strftime(TIME_FMT, time.gmtime(expires))}
|
||||
|
||||
def warning(self, resp):
|
||||
return None
|
||||
|
|
|
@ -48,23 +48,22 @@ class Serializer(object):
|
|||
u"response": {
|
||||
u"body": body,
|
||||
u"headers": dict(
|
||||
(text_type(k), text_type(v))
|
||||
for k, v in response.headers.items()
|
||||
(text_type(k), text_type(v)) for k, v in response.headers.items()
|
||||
),
|
||||
u"status": response.status,
|
||||
u"version": response.version,
|
||||
u"reason": text_type(response.reason),
|
||||
u"strict": response.strict,
|
||||
u"decode_content": response.decode_content,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
# Construct our vary headers
|
||||
data[u"vary"] = {}
|
||||
if u"vary" in response_headers:
|
||||
varied_headers = response_headers[u'vary'].split(',')
|
||||
varied_headers = response_headers[u"vary"].split(",")
|
||||
for header in varied_headers:
|
||||
header = header.strip()
|
||||
header = text_type(header).strip()
|
||||
header_value = request.headers.get(header, None)
|
||||
if header_value is not None:
|
||||
header_value = text_type(header_value)
|
||||
|
@ -95,7 +94,8 @@ class Serializer(object):
|
|||
|
||||
# Dispatch to the actual load method for the given version
|
||||
try:
|
||||
return getattr(self, "_loads_v{0}".format(ver))(request, data)
|
||||
return getattr(self, "_loads_v{}".format(ver))(request, data)
|
||||
|
||||
except AttributeError:
|
||||
# This is a version we don't have a loads function for, so we'll
|
||||
# just treat it as a miss and return None
|
||||
|
@ -118,11 +118,11 @@ class Serializer(object):
|
|||
|
||||
body_raw = cached["response"].pop("body")
|
||||
|
||||
headers = CaseInsensitiveDict(data=cached['response']['headers'])
|
||||
if headers.get('transfer-encoding', '') == 'chunked':
|
||||
headers.pop('transfer-encoding')
|
||||
headers = CaseInsensitiveDict(data=cached["response"]["headers"])
|
||||
if headers.get("transfer-encoding", "") == "chunked":
|
||||
headers.pop("transfer-encoding")
|
||||
|
||||
cached['response']['headers'] = headers
|
||||
cached["response"]["headers"] = headers
|
||||
|
||||
try:
|
||||
body = io.BytesIO(body_raw)
|
||||
|
@ -133,13 +133,9 @@ class Serializer(object):
|
|||
# fail with:
|
||||
#
|
||||
# TypeError: 'str' does not support the buffer interface
|
||||
body = io.BytesIO(body_raw.encode('utf8'))
|
||||
body = io.BytesIO(body_raw.encode("utf8"))
|
||||
|
||||
return HTTPResponse(
|
||||
body=body,
|
||||
preload_content=False,
|
||||
**cached["response"]
|
||||
)
|
||||
return HTTPResponse(body=body, preload_content=False, **cached["response"])
|
||||
|
||||
def _loads_v0(self, request, data):
|
||||
# The original legacy cache data. This doesn't contain enough
|
||||
|
@ -162,16 +158,12 @@ class Serializer(object):
|
|||
return
|
||||
|
||||
# We need to decode the items that we've base64 encoded
|
||||
cached["response"]["body"] = _b64_decode_bytes(
|
||||
cached["response"]["body"]
|
||||
)
|
||||
cached["response"]["body"] = _b64_decode_bytes(cached["response"]["body"])
|
||||
cached["response"]["headers"] = dict(
|
||||
(_b64_decode_str(k), _b64_decode_str(v))
|
||||
for k, v in cached["response"]["headers"].items()
|
||||
)
|
||||
cached["response"]["reason"] = _b64_decode_str(
|
||||
cached["response"]["reason"],
|
||||
)
|
||||
cached["response"]["reason"] = _b64_decode_str(cached["response"]["reason"])
|
||||
cached["vary"] = dict(
|
||||
(_b64_decode_str(k), _b64_decode_str(v) if v is not None else v)
|
||||
for k, v in cached["vary"].items()
|
||||
|
@ -187,7 +179,7 @@ class Serializer(object):
|
|||
|
||||
def _loads_v4(self, request, data):
|
||||
try:
|
||||
cached = msgpack.loads(data, encoding='utf-8')
|
||||
cached = msgpack.loads(data, encoding="utf-8")
|
||||
except ValueError:
|
||||
return
|
||||
|
||||
|
|
|
@ -2,14 +2,16 @@ from .adapter import CacheControlAdapter
|
|||
from .cache import DictCache
|
||||
|
||||
|
||||
def CacheControl(sess,
|
||||
cache=None,
|
||||
cache_etags=True,
|
||||
serializer=None,
|
||||
heuristic=None,
|
||||
controller_class=None,
|
||||
adapter_class=None,
|
||||
cacheable_methods=None):
|
||||
def CacheControl(
|
||||
sess,
|
||||
cache=None,
|
||||
cache_etags=True,
|
||||
serializer=None,
|
||||
heuristic=None,
|
||||
controller_class=None,
|
||||
adapter_class=None,
|
||||
cacheable_methods=None,
|
||||
):
|
||||
|
||||
cache = cache or DictCache()
|
||||
adapter_class = adapter_class or CacheControlAdapter
|
||||
|
@ -19,9 +21,9 @@ def CacheControl(sess,
|
|||
serializer=serializer,
|
||||
heuristic=heuristic,
|
||||
controller_class=controller_class,
|
||||
cacheable_methods=cacheable_methods
|
||||
cacheable_methods=cacheable_methods,
|
||||
)
|
||||
sess.mount('http://', adapter)
|
||||
sess.mount('https://', adapter)
|
||||
sess.mount("http://", adapter)
|
||||
sess.mount("https://", adapter)
|
||||
|
||||
return sess
|
||||
|
|
|
@ -1,3 +1,3 @@
|
|||
from .core import where, old_where
|
||||
|
||||
__version__ = "2018.01.18"
|
||||
__version__ = "2018.04.16"
|
||||
|
|
|
@ -3483,39 +3483,6 @@ AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ
|
|||
5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su
|
||||
-----END CERTIFICATE-----
|
||||
|
||||
# Issuer: CN=T\xdcRKTRUST Elektronik Sertifika Hizmet Sa\u011flay\u0131c\u0131s\u0131 H5 O=T\xdcRKTRUST Bilgi \u0130leti\u015fim ve Bili\u015fim G\xfcvenli\u011fi Hizmetleri A.\u015e.
|
||||
# Subject: CN=T\xdcRKTRUST Elektronik Sertifika Hizmet Sa\u011flay\u0131c\u0131s\u0131 H5 O=T\xdcRKTRUST Bilgi \u0130leti\u015fim ve Bili\u015fim G\xfcvenli\u011fi Hizmetleri A.\u015e.
|
||||
# Label: "T\xdcRKTRUST Elektronik Sertifika Hizmet Sa\u011flay\u0131c\u0131s\u0131 H5"
|
||||
# Serial: 156233699172481
|
||||
# MD5 Fingerprint: da:70:8e:f0:22:df:93:26:f6:5f:9f:d3:15:06:52:4e
|
||||
# SHA1 Fingerprint: c4:18:f6:4d:46:d1:df:00:3d:27:30:13:72:43:a9:12:11:c6:75:fb
|
||||
# SHA256 Fingerprint: 49:35:1b:90:34:44:c1:85:cc:dc:5c:69:3d:24:d8:55:5c:b2:08:d6:a8:14:13:07:69:9f:4a:f0:63:19:9d:78
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIEJzCCAw+gAwIBAgIHAI4X/iQggTANBgkqhkiG9w0BAQsFADCBsTELMAkGA1UE
|
||||
BhMCVFIxDzANBgNVBAcMBkFua2FyYTFNMEsGA1UECgxEVMOcUktUUlVTVCBCaWxn
|
||||
aSDEsGxldGnFn2ltIHZlIEJpbGnFn2ltIEfDvHZlbmxpxJ9pIEhpem1ldGxlcmkg
|
||||
QS7Fni4xQjBABgNVBAMMOVTDnFJLVFJVU1QgRWxla3Ryb25payBTZXJ0aWZpa2Eg
|
||||
SGl6bWV0IFNhxJ9sYXnEsWPEsXPEsSBINTAeFw0xMzA0MzAwODA3MDFaFw0yMzA0
|
||||
MjgwODA3MDFaMIGxMQswCQYDVQQGEwJUUjEPMA0GA1UEBwwGQW5rYXJhMU0wSwYD
|
||||
VQQKDERUw5xSS1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8
|
||||
dmVubGnEn2kgSGl6bWV0bGVyaSBBLsWeLjFCMEAGA1UEAww5VMOcUktUUlVTVCBF
|
||||
bGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxIEg1MIIB
|
||||
IjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEApCUZ4WWe60ghUEoI5RHwWrom
|
||||
/4NZzkQqL/7hzmAD/I0Dpe3/a6i6zDQGn1k19uwsu537jVJp45wnEFPzpALFp/kR
|
||||
Gml1bsMdi9GYjZOHp3GXDSHHmflS0yxjXVW86B8BSLlg/kJK9siArs1mep5Fimh3
|
||||
4khon6La8eHBEJ/rPCmBp+EyCNSgBbGM+42WAA4+Jd9ThiI7/PS98wl+d+yG6w8z
|
||||
5UNP9FR1bSmZLmZaQ9/LXMrI5Tjxfjs1nQ/0xVqhzPMggCTTV+wVunUlm+hkS7M0
|
||||
hO8EuPbJbKoCPrZV4jI3X/xml1/N1p7HIL9Nxqw/dV8c7TKcfGkAaZHjIxhT6QID
|
||||
AQABo0IwQDAdBgNVHQ4EFgQUVpkHHtOsDGlktAxQR95DLL4gwPswDgYDVR0PAQH/
|
||||
BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAJ5FdnsX
|
||||
SDLyOIspve6WSk6BGLFRRyDN0GSxDsnZAdkJzsiZ3GglE9Rc8qPoBP5yCccLqh0l
|
||||
VX6Wmle3usURehnmp349hQ71+S4pL+f5bFgWV1Al9j4uPqrtd3GqqpmWRgqujuwq
|
||||
URawXs3qZwQcWDD1YIq9pr1N5Za0/EKJAWv2cMhQOQwt1WbZyNKzMrcbGW3LM/nf
|
||||
peYVhDfwwvJllpKQd/Ct9JDpEXjXk4nAPQu6KfTomZ1yju2dL+6SfaHx/126M2CF
|
||||
Yv4HAqGEVka+lgqaE9chTLd8B59OTj+RdPsnnRHM3eaxynFNExc5JsUpISuTKWqW
|
||||
+qtB4Uu2NQvAmxU=
|
||||
-----END CERTIFICATE-----
|
||||
|
||||
# Issuer: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903
|
||||
# Subject: CN=Certinomis - Root CA O=Certinomis OU=0002 433998903
|
||||
# Label: "Certinomis - Root CA"
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# Copyright 2015,2016 Nir Cohen
|
||||
# Copyright 2015,2016,2017 Nir Cohen
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
|
@ -23,7 +23,7 @@ functionality. An alternative implementation became necessary because Python
|
|||
3.5 deprecated this function, and Python 3.7 is expected to remove it
|
||||
altogether. Its predecessor function :py:func:`platform.dist` was already
|
||||
deprecated since Python 2.6 and is also expected to be removed in Python 3.7.
|
||||
Still, there are many cases in which access to Linux distribution information
|
||||
Still, there are many cases in which access to OS distribution information
|
||||
is needed. See `Python issue 1322 <https://bugs.python.org/issue1322>`_ for
|
||||
more information.
|
||||
"""
|
||||
|
@ -94,7 +94,7 @@ _DISTRO_RELEASE_IGNORE_BASENAMES = (
|
|||
|
||||
def linux_distribution(full_distribution_name=True):
|
||||
"""
|
||||
Return information about the current Linux distribution as a tuple
|
||||
Return information about the current OS distribution as a tuple
|
||||
``(id_name, version, codename)`` with items as follows:
|
||||
|
||||
* ``id_name``: If *full_distribution_name* is false, the result of
|
||||
|
@ -110,22 +110,22 @@ def linux_distribution(full_distribution_name=True):
|
|||
|
||||
The data it returns may not exactly be the same, because it uses more data
|
||||
sources than the original function, and that may lead to different data if
|
||||
the Linux distribution is not consistent across multiple data sources it
|
||||
the OS distribution is not consistent across multiple data sources it
|
||||
provides (there are indeed such distributions ...).
|
||||
|
||||
Another reason for differences is the fact that the :func:`distro.id`
|
||||
method normalizes the distro ID string to a reliable machine-readable value
|
||||
for a number of popular Linux distributions.
|
||||
for a number of popular OS distributions.
|
||||
"""
|
||||
return _distro.linux_distribution(full_distribution_name)
|
||||
|
||||
|
||||
def id():
|
||||
"""
|
||||
Return the distro ID of the current Linux distribution, as a
|
||||
Return the distro ID of the current distribution, as a
|
||||
machine-readable string.
|
||||
|
||||
For a number of Linux distributions, the returned distro ID value is
|
||||
For a number of OS distributions, the returned distro ID value is
|
||||
*reliable*, in the sense that it is documented and that it does not change
|
||||
across releases of the distribution.
|
||||
|
||||
|
@ -158,6 +158,9 @@ def id():
|
|||
"scientific" Scientific Linux
|
||||
"slackware" Slackware
|
||||
"xenserver" XenServer
|
||||
"openbsd" OpenBSD
|
||||
"netbsd" NetBSD
|
||||
"freebsd" FreeBSD
|
||||
============== =========================================
|
||||
|
||||
If you have a need to get distros for reliable IDs added into this set,
|
||||
|
@ -187,7 +190,7 @@ def id():
|
|||
* a normalization of the ID is performed, based upon
|
||||
`normalization tables`_. The purpose of this normalization is to ensure
|
||||
that the ID is as reliable as possible, even across incompatible changes
|
||||
in the Linux distributions. A common reason for an incompatible change is
|
||||
in the OS distributions. A common reason for an incompatible change is
|
||||
the addition of an os-release file, or the addition of the lsb_release
|
||||
command, with ID values that differ from what was previously determined
|
||||
from the distro release file name.
|
||||
|
@ -197,7 +200,7 @@ def id():
|
|||
|
||||
def name(pretty=False):
|
||||
"""
|
||||
Return the name of the current Linux distribution, as a human-readable
|
||||
Return the name of the current OS distribution, as a human-readable
|
||||
string.
|
||||
|
||||
If *pretty* is false, the name is returned without version or codename.
|
||||
|
@ -236,7 +239,7 @@ def name(pretty=False):
|
|||
|
||||
def version(pretty=False, best=False):
|
||||
"""
|
||||
Return the version of the current Linux distribution, as a human-readable
|
||||
Return the version of the current OS distribution, as a human-readable
|
||||
string.
|
||||
|
||||
If *pretty* is false, the version is returned without codename (e.g.
|
||||
|
@ -280,7 +283,7 @@ def version(pretty=False, best=False):
|
|||
|
||||
def version_parts(best=False):
|
||||
"""
|
||||
Return the version of the current Linux distribution as a tuple
|
||||
Return the version of the current OS distribution as a tuple
|
||||
``(major, minor, build_number)`` with items as follows:
|
||||
|
||||
* ``major``: The result of :func:`distro.major_version`.
|
||||
|
@ -297,7 +300,7 @@ def version_parts(best=False):
|
|||
|
||||
def major_version(best=False):
|
||||
"""
|
||||
Return the major version of the current Linux distribution, as a string,
|
||||
Return the major version of the current OS distribution, as a string,
|
||||
if provided.
|
||||
Otherwise, the empty string is returned. The major version is the first
|
||||
part of the dot-separated version string.
|
||||
|
@ -310,7 +313,7 @@ def major_version(best=False):
|
|||
|
||||
def minor_version(best=False):
|
||||
"""
|
||||
Return the minor version of the current Linux distribution, as a string,
|
||||
Return the minor version of the current OS distribution, as a string,
|
||||
if provided.
|
||||
Otherwise, the empty string is returned. The minor version is the second
|
||||
part of the dot-separated version string.
|
||||
|
@ -323,7 +326,7 @@ def minor_version(best=False):
|
|||
|
||||
def build_number(best=False):
|
||||
"""
|
||||
Return the build number of the current Linux distribution, as a string,
|
||||
Return the build number of the current OS distribution, as a string,
|
||||
if provided.
|
||||
Otherwise, the empty string is returned. The build number is the third part
|
||||
of the dot-separated version string.
|
||||
|
@ -337,7 +340,7 @@ def build_number(best=False):
|
|||
def like():
|
||||
"""
|
||||
Return a space-separated list of distro IDs of distributions that are
|
||||
closely related to the current Linux distribution in regards to packaging
|
||||
closely related to the current OS distribution in regards to packaging
|
||||
and programming interfaces, for example distributions the current
|
||||
distribution is a derivative from.
|
||||
|
||||
|
@ -353,7 +356,7 @@ def like():
|
|||
|
||||
def codename():
|
||||
"""
|
||||
Return the codename for the release of the current Linux distribution,
|
||||
Return the codename for the release of the current OS distribution,
|
||||
as a string.
|
||||
|
||||
If the distribution does not have a codename, an empty string is returned.
|
||||
|
@ -377,7 +380,7 @@ def codename():
|
|||
|
||||
def info(pretty=False, best=False):
|
||||
"""
|
||||
Return certain machine-readable information items about the current Linux
|
||||
Return certain machine-readable information items about the current OS
|
||||
distribution in a dictionary, as shown in the following example:
|
||||
|
||||
.. sourcecode:: python
|
||||
|
@ -422,7 +425,7 @@ def info(pretty=False, best=False):
|
|||
def os_release_info():
|
||||
"""
|
||||
Return a dictionary containing key-value pairs for the information items
|
||||
from the os-release file data source of the current Linux distribution.
|
||||
from the os-release file data source of the current OS distribution.
|
||||
|
||||
See `os-release file`_ for details about these information items.
|
||||
"""
|
||||
|
@ -432,7 +435,7 @@ def os_release_info():
|
|||
def lsb_release_info():
|
||||
"""
|
||||
Return a dictionary containing key-value pairs for the information items
|
||||
from the lsb_release command data source of the current Linux distribution.
|
||||
from the lsb_release command data source of the current OS distribution.
|
||||
|
||||
See `lsb_release command output`_ for details about these information
|
||||
items.
|
||||
|
@ -443,17 +446,25 @@ def lsb_release_info():
|
|||
def distro_release_info():
|
||||
"""
|
||||
Return a dictionary containing key-value pairs for the information items
|
||||
from the distro release file data source of the current Linux distribution.
|
||||
from the distro release file data source of the current OS distribution.
|
||||
|
||||
See `distro release file`_ for details about these information items.
|
||||
"""
|
||||
return _distro.distro_release_info()
|
||||
|
||||
|
||||
def uname_info():
|
||||
"""
|
||||
Return a dictionary containing key-value pairs for the information items
|
||||
from the distro release file data source of the current OS distribution.
|
||||
"""
|
||||
return _distro.uname_info()
|
||||
|
||||
|
||||
def os_release_attr(attribute):
|
||||
"""
|
||||
Return a single named information item from the os-release file data source
|
||||
of the current Linux distribution.
|
||||
of the current OS distribution.
|
||||
|
||||
Parameters:
|
||||
|
||||
|
@ -472,7 +483,7 @@ def os_release_attr(attribute):
|
|||
def lsb_release_attr(attribute):
|
||||
"""
|
||||
Return a single named information item from the lsb_release command output
|
||||
data source of the current Linux distribution.
|
||||
data source of the current OS distribution.
|
||||
|
||||
Parameters:
|
||||
|
||||
|
@ -492,7 +503,7 @@ def lsb_release_attr(attribute):
|
|||
def distro_release_attr(attribute):
|
||||
"""
|
||||
Return a single named information item from the distro release file
|
||||
data source of the current Linux distribution.
|
||||
data source of the current OS distribution.
|
||||
|
||||
Parameters:
|
||||
|
||||
|
@ -508,6 +519,23 @@ def distro_release_attr(attribute):
|
|||
return _distro.distro_release_attr(attribute)
|
||||
|
||||
|
||||
def uname_attr(attribute):
|
||||
"""
|
||||
Return a single named information item from the distro release file
|
||||
data source of the current OS distribution.
|
||||
|
||||
Parameters:
|
||||
|
||||
* ``attribute`` (string): Key of the information item.
|
||||
|
||||
Returns:
|
||||
|
||||
* (string): Value of the information item, if the item exists.
|
||||
The empty string, if the item does not exist.
|
||||
"""
|
||||
return _distro.uname_attr(attribute)
|
||||
|
||||
|
||||
class cached_property(object):
|
||||
"""A version of @property which caches the value. On access, it calls the
|
||||
underlying function and sets the value in `__dict__` so future accesses
|
||||
|
@ -525,13 +553,13 @@ class cached_property(object):
|
|||
|
||||
class LinuxDistribution(object):
|
||||
"""
|
||||
Provides information about a Linux distribution.
|
||||
Provides information about a OS distribution.
|
||||
|
||||
This package creates a private module-global instance of this class with
|
||||
default initialization arguments, that is used by the
|
||||
`consolidated accessor functions`_ and `single source accessor functions`_.
|
||||
By using default initialization arguments, that module-global instance
|
||||
returns data about the current Linux distribution (i.e. the distro this
|
||||
returns data about the current OS distribution (i.e. the distro this
|
||||
package runs on).
|
||||
|
||||
Normally, it is not necessary to create additional instances of this class.
|
||||
|
@ -544,7 +572,8 @@ class LinuxDistribution(object):
|
|||
def __init__(self,
|
||||
include_lsb=True,
|
||||
os_release_file='',
|
||||
distro_release_file=''):
|
||||
distro_release_file='',
|
||||
include_uname=True):
|
||||
"""
|
||||
The initialization method of this class gathers information from the
|
||||
available data sources, and stores that in private instance attributes.
|
||||
|
@ -578,6 +607,11 @@ class LinuxDistribution(object):
|
|||
distro release file can be found, the data source for the distro
|
||||
release file will be empty.
|
||||
|
||||
* ``include_name`` (bool): Controls whether uname command output is
|
||||
included as a data source. If the uname command is not available in
|
||||
the program execution path the data source for the uname command will
|
||||
be empty.
|
||||
|
||||
Public instance attributes:
|
||||
|
||||
* ``os_release_file`` (string): The path name of the
|
||||
|
@ -591,6 +625,10 @@ class LinuxDistribution(object):
|
|||
* ``include_lsb`` (bool): The result of the ``include_lsb`` parameter.
|
||||
This controls whether the lsb information will be loaded.
|
||||
|
||||
* ``include_uname`` (bool): The result of the ``include_uname``
|
||||
parameter. This controls whether the uname information will
|
||||
be loaded.
|
||||
|
||||
Raises:
|
||||
|
||||
* :py:exc:`IOError`: Some I/O issue with an os-release file or distro
|
||||
|
@ -607,6 +645,7 @@ class LinuxDistribution(object):
|
|||
os.path.join(_UNIXCONFDIR, _OS_RELEASE_BASENAME)
|
||||
self.distro_release_file = distro_release_file or '' # updated later
|
||||
self.include_lsb = include_lsb
|
||||
self.include_uname = include_uname
|
||||
|
||||
def __repr__(self):
|
||||
"""Return repr of all info
|
||||
|
@ -616,14 +655,16 @@ class LinuxDistribution(object):
|
|||
"os_release_file={self.os_release_file!r}, " \
|
||||
"distro_release_file={self.distro_release_file!r}, " \
|
||||
"include_lsb={self.include_lsb!r}, " \
|
||||
"include_uname={self.include_uname!r}, " \
|
||||
"_os_release_info={self._os_release_info!r}, " \
|
||||
"_lsb_release_info={self._lsb_release_info!r}, " \
|
||||
"_distro_release_info={self._distro_release_info!r})".format(
|
||||
"_distro_release_info={self._distro_release_info!r}, " \
|
||||
"_uname_info={self._uname_info!r})".format(
|
||||
self=self)
|
||||
|
||||
def linux_distribution(self, full_distribution_name=True):
|
||||
"""
|
||||
Return information about the Linux distribution that is compatible
|
||||
Return information about the OS distribution that is compatible
|
||||
with Python's :func:`platform.linux_distribution`, supporting a subset
|
||||
of its parameters.
|
||||
|
||||
|
@ -636,7 +677,7 @@ class LinuxDistribution(object):
|
|||
)
|
||||
|
||||
def id(self):
|
||||
"""Return the distro ID of the Linux distribution, as a string.
|
||||
"""Return the distro ID of the OS distribution, as a string.
|
||||
|
||||
For details, see :func:`distro.id`.
|
||||
"""
|
||||
|
@ -656,22 +697,28 @@ class LinuxDistribution(object):
|
|||
if distro_id:
|
||||
return normalize(distro_id, NORMALIZED_DISTRO_ID)
|
||||
|
||||
distro_id = self.uname_attr('id')
|
||||
if distro_id:
|
||||
return normalize(distro_id, NORMALIZED_DISTRO_ID)
|
||||
|
||||
return ''
|
||||
|
||||
def name(self, pretty=False):
|
||||
"""
|
||||
Return the name of the Linux distribution, as a string.
|
||||
Return the name of the OS distribution, as a string.
|
||||
|
||||
For details, see :func:`distro.name`.
|
||||
"""
|
||||
name = self.os_release_attr('name') \
|
||||
or self.lsb_release_attr('distributor_id') \
|
||||
or self.distro_release_attr('name')
|
||||
or self.distro_release_attr('name') \
|
||||
or self.uname_attr('name')
|
||||
if pretty:
|
||||
name = self.os_release_attr('pretty_name') \
|
||||
or self.lsb_release_attr('description')
|
||||
if not name:
|
||||
name = self.distro_release_attr('name')
|
||||
name = self.distro_release_attr('name') \
|
||||
or self.uname_attr('name')
|
||||
version = self.version(pretty=True)
|
||||
if version:
|
||||
name = name + ' ' + version
|
||||
|
@ -679,7 +726,7 @@ class LinuxDistribution(object):
|
|||
|
||||
def version(self, pretty=False, best=False):
|
||||
"""
|
||||
Return the version of the Linux distribution, as a string.
|
||||
Return the version of the OS distribution, as a string.
|
||||
|
||||
For details, see :func:`distro.version`.
|
||||
"""
|
||||
|
@ -690,7 +737,8 @@ class LinuxDistribution(object):
|
|||
self._parse_distro_release_content(
|
||||
self.os_release_attr('pretty_name')).get('version_id', ''),
|
||||
self._parse_distro_release_content(
|
||||
self.lsb_release_attr('description')).get('version_id', '')
|
||||
self.lsb_release_attr('description')).get('version_id', ''),
|
||||
self.uname_attr('release')
|
||||
]
|
||||
version = ''
|
||||
if best:
|
||||
|
@ -712,7 +760,7 @@ class LinuxDistribution(object):
|
|||
|
||||
def version_parts(self, best=False):
|
||||
"""
|
||||
Return the version of the Linux distribution, as a tuple of version
|
||||
Return the version of the OS distribution, as a tuple of version
|
||||
numbers.
|
||||
|
||||
For details, see :func:`distro.version_parts`.
|
||||
|
@ -736,7 +784,7 @@ class LinuxDistribution(object):
|
|||
|
||||
def minor_version(self, best=False):
|
||||
"""
|
||||
Return the minor version number of the Linux distribution.
|
||||
Return the minor version number of the current distribution.
|
||||
|
||||
For details, see :func:`distro.minor_version`.
|
||||
"""
|
||||
|
@ -744,7 +792,7 @@ class LinuxDistribution(object):
|
|||
|
||||
def build_number(self, best=False):
|
||||
"""
|
||||
Return the build number of the Linux distribution.
|
||||
Return the build number of the current distribution.
|
||||
|
||||
For details, see :func:`distro.build_number`.
|
||||
"""
|
||||
|
@ -752,7 +800,7 @@ class LinuxDistribution(object):
|
|||
|
||||
def like(self):
|
||||
"""
|
||||
Return the IDs of distributions that are like the Linux distribution.
|
||||
Return the IDs of distributions that are like the OS distribution.
|
||||
|
||||
For details, see :func:`distro.like`.
|
||||
"""
|
||||
|
@ -760,7 +808,7 @@ class LinuxDistribution(object):
|
|||
|
||||
def codename(self):
|
||||
"""
|
||||
Return the codename of the Linux distribution.
|
||||
Return the codename of the OS distribution.
|
||||
|
||||
For details, see :func:`distro.codename`.
|
||||
"""
|
||||
|
@ -771,7 +819,7 @@ class LinuxDistribution(object):
|
|||
|
||||
def info(self, pretty=False, best=False):
|
||||
"""
|
||||
Return certain machine-readable information about the Linux
|
||||
Return certain machine-readable information about the OS
|
||||
distribution.
|
||||
|
||||
For details, see :func:`distro.info`.
|
||||
|
@ -791,7 +839,7 @@ class LinuxDistribution(object):
|
|||
def os_release_info(self):
|
||||
"""
|
||||
Return a dictionary containing key-value pairs for the information
|
||||
items from the os-release file data source of the Linux distribution.
|
||||
items from the os-release file data source of the OS distribution.
|
||||
|
||||
For details, see :func:`distro.os_release_info`.
|
||||
"""
|
||||
|
@ -800,7 +848,7 @@ class LinuxDistribution(object):
|
|||
def lsb_release_info(self):
|
||||
"""
|
||||
Return a dictionary containing key-value pairs for the information
|
||||
items from the lsb_release command data source of the Linux
|
||||
items from the lsb_release command data source of the OS
|
||||
distribution.
|
||||
|
||||
For details, see :func:`distro.lsb_release_info`.
|
||||
|
@ -810,17 +858,25 @@ class LinuxDistribution(object):
|
|||
def distro_release_info(self):
|
||||
"""
|
||||
Return a dictionary containing key-value pairs for the information
|
||||
items from the distro release file data source of the Linux
|
||||
items from the distro release file data source of the OS
|
||||
distribution.
|
||||
|
||||
For details, see :func:`distro.distro_release_info`.
|
||||
"""
|
||||
return self._distro_release_info
|
||||
|
||||
def uname_info(self):
|
||||
"""
|
||||
Return a dictionary containing key-value pairs for the information
|
||||
items from the uname command data source of the OS distribution.
|
||||
|
||||
For details, see :func:`distro.uname_info`.
|
||||
"""
|
||||
|
||||
def os_release_attr(self, attribute):
|
||||
"""
|
||||
Return a single named information item from the os-release file data
|
||||
source of the Linux distribution.
|
||||
source of the OS distribution.
|
||||
|
||||
For details, see :func:`distro.os_release_attr`.
|
||||
"""
|
||||
|
@ -829,7 +885,7 @@ class LinuxDistribution(object):
|
|||
def lsb_release_attr(self, attribute):
|
||||
"""
|
||||
Return a single named information item from the lsb_release command
|
||||
output data source of the Linux distribution.
|
||||
output data source of the OS distribution.
|
||||
|
||||
For details, see :func:`distro.lsb_release_attr`.
|
||||
"""
|
||||
|
@ -838,12 +894,21 @@ class LinuxDistribution(object):
|
|||
def distro_release_attr(self, attribute):
|
||||
"""
|
||||
Return a single named information item from the distro release file
|
||||
data source of the Linux distribution.
|
||||
data source of the OS distribution.
|
||||
|
||||
For details, see :func:`distro.distro_release_attr`.
|
||||
"""
|
||||
return self._distro_release_info.get(attribute, '')
|
||||
|
||||
def uname_attr(self, attribute):
|
||||
"""
|
||||
Return a single named information item from the uname command
|
||||
output data source of the OS distribution.
|
||||
|
||||
For details, see :func:`distro.uname_release_attr`.
|
||||
"""
|
||||
return self._uname_info.get(attribute, '')
|
||||
|
||||
@cached_property
|
||||
def _os_release_info(self):
|
||||
"""
|
||||
|
@ -960,6 +1025,34 @@ class LinuxDistribution(object):
|
|||
props.update({k.replace(' ', '_').lower(): v.strip()})
|
||||
return props
|
||||
|
||||
@cached_property
|
||||
def _uname_info(self):
|
||||
with open(os.devnull, 'w') as devnull:
|
||||
try:
|
||||
cmd = ('uname', '-rs')
|
||||
stdout = subprocess.check_output(cmd, stderr=devnull)
|
||||
except OSError:
|
||||
return {}
|
||||
content = stdout.decode(sys.getfilesystemencoding()).splitlines()
|
||||
return self._parse_uname_content(content)
|
||||
|
||||
@staticmethod
|
||||
def _parse_uname_content(lines):
|
||||
props = {}
|
||||
match = re.search(r'^([^\s]+)\s+([\d\.]+)', lines[0].strip())
|
||||
if match:
|
||||
name, version = match.groups()
|
||||
|
||||
# This is to prevent the Linux kernel version from
|
||||
# appearing as the 'best' version on otherwise
|
||||
# identifiable distributions.
|
||||
if name == 'Linux':
|
||||
return {}
|
||||
props['id'] = name.lower()
|
||||
props['name'] = name
|
||||
props['release'] = version
|
||||
return props
|
||||
|
||||
@cached_property
|
||||
def _distro_release_info(self):
|
||||
"""
|
||||
|
@ -1082,7 +1175,7 @@ def main():
|
|||
logger.setLevel(logging.DEBUG)
|
||||
logger.addHandler(logging.StreamHandler(sys.stdout))
|
||||
|
||||
parser = argparse.ArgumentParser(description="Linux distro info tool")
|
||||
parser = argparse.ArgumentParser(description="OS distro info tool")
|
||||
parser.add_argument(
|
||||
'--json',
|
||||
'-j',
|
||||
|
|
|
@ -34,7 +34,11 @@ class InvalidCodepointContext(IDNAError):
|
|||
|
||||
|
||||
def _combining_class(cp):
|
||||
return unicodedata.combining(unichr(cp))
|
||||
v = unicodedata.combining(unichr(cp))
|
||||
if v == 0:
|
||||
if not unicodedata.name(unichr(cp)):
|
||||
raise ValueError("Unknown character in unicodedata")
|
||||
return v
|
||||
|
||||
def _is_script(cp, script):
|
||||
return intranges_contain(ord(cp), idnadata.scripts[script])
|
||||
|
@ -71,7 +75,6 @@ def check_bidi(label, check_ltr=False):
|
|||
raise IDNABidiError('Unknown directionality in label {0} at position {1}'.format(repr(label), idx))
|
||||
if direction in ['R', 'AL', 'AN']:
|
||||
bidi_label = True
|
||||
break
|
||||
if not bidi_label and not check_ltr:
|
||||
return True
|
||||
|
||||
|
@ -244,8 +247,13 @@ def check_label(label):
|
|||
if intranges_contain(cp_value, idnadata.codepoint_classes['PVALID']):
|
||||
continue
|
||||
elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTJ']):
|
||||
if not valid_contextj(label, pos):
|
||||
raise InvalidCodepointContext('Joiner {0} not allowed at position {1} in {2}'.format(_unot(cp_value), pos+1, repr(label)))
|
||||
try:
|
||||
if not valid_contextj(label, pos):
|
||||
raise InvalidCodepointContext('Joiner {0} not allowed at position {1} in {2}'.format(
|
||||
_unot(cp_value), pos+1, repr(label)))
|
||||
except ValueError:
|
||||
raise IDNAError('Unknown codepoint adjacent to joiner {0} at position {1} in {2}'.format(
|
||||
_unot(cp_value), pos+1, repr(label)))
|
||||
elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTO']):
|
||||
if not valid_contexto(label, pos):
|
||||
raise InvalidCodepointContext('Codepoint {0} not allowed at position {1} in {2}'.format(_unot(cp_value), pos+1, repr(label)))
|
||||
|
@ -317,10 +325,10 @@ def uts46_remap(domain, std3_rules=True, transitional=False):
|
|||
replacement = uts46row[2] if len(uts46row) == 3 else None
|
||||
if (status == "V" or
|
||||
(status == "D" and not transitional) or
|
||||
(status == "3" and std3_rules and replacement is None)):
|
||||
(status == "3" and not std3_rules and replacement is None)):
|
||||
output += char
|
||||
elif replacement is not None and (status == "M" or
|
||||
(status == "3" and std3_rules) or
|
||||
(status == "3" and not std3_rules) or
|
||||
(status == "D" and transitional)):
|
||||
output += replacement
|
||||
elif status != "I":
|
||||
|
@ -344,15 +352,17 @@ def encode(s, strict=False, uts46=False, std3_rules=False, transitional=False):
|
|||
labels = s.split('.')
|
||||
else:
|
||||
labels = _unicode_dots_re.split(s)
|
||||
while labels and not labels[0]:
|
||||
del labels[0]
|
||||
if not labels:
|
||||
if not labels or labels == ['']:
|
||||
raise IDNAError('Empty domain')
|
||||
if labels[-1] == '':
|
||||
del labels[-1]
|
||||
trailing_dot = True
|
||||
for label in labels:
|
||||
result.append(alabel(label))
|
||||
s = alabel(label)
|
||||
if s:
|
||||
result.append(s)
|
||||
else:
|
||||
raise IDNAError('Empty label')
|
||||
if trailing_dot:
|
||||
result.append(b'')
|
||||
s = b'.'.join(result)
|
||||
|
@ -373,15 +383,17 @@ def decode(s, strict=False, uts46=False, std3_rules=False):
|
|||
labels = _unicode_dots_re.split(s)
|
||||
else:
|
||||
labels = s.split(u'.')
|
||||
while labels and not labels[0]:
|
||||
del labels[0]
|
||||
if not labels:
|
||||
if not labels or labels == ['']:
|
||||
raise IDNAError('Empty domain')
|
||||
if not labels[-1]:
|
||||
del labels[-1]
|
||||
trailing_dot = True
|
||||
for label in labels:
|
||||
result.append(ulabel(label))
|
||||
s = ulabel(label)
|
||||
if s:
|
||||
result.append(s)
|
||||
else:
|
||||
raise IDNAError('Empty label')
|
||||
if trailing_dot:
|
||||
result.append(u'')
|
||||
return u'.'.join(result)
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue