1
1
Fork 0
mirror of https://github.com/pypa/pip synced 2023-12-13 21:30:23 +01:00

Merge branch 'master' into req-types-finished

This commit is contained in:
Maxim Kurnikov 2019-01-25 20:00:20 +03:00
commit 0c0bfe256d
180 changed files with 4323 additions and 2473 deletions

View file

@ -1,8 +1,11 @@
AceGentile <ventogrigio83@gmail.com> AceGentile <ventogrigio83@gmail.com>
Adam Chainz <adam@adamj.eu> Adam Chainz <adam@adamj.eu>
Adam Tse <atse@users.noreply.github.com>
Adam Wentz <awentz@theonion.com> Adam Wentz <awentz@theonion.com>
Adrien Morison <adrien.morison@gmail.com> Adrien Morison <adrien.morison@gmail.com>
Alan Yee <alyee@ucsd.edu> Alan Yee <alyee@ucsd.edu>
Albert-Guan <albert.guan94@gmail.com>
albertg <albert.guan94@gmail.com>
Aleks Bunin <github@compuix.com> Aleks Bunin <github@compuix.com>
Alethea Flowers <magicalgirl@google.com> Alethea Flowers <magicalgirl@google.com>
Alex Gaynor <alex.gaynor@gmail.com> Alex Gaynor <alex.gaynor@gmail.com>
@ -14,6 +17,7 @@ Alexandre Conrad <alexandre.conrad@gmail.com>
Alexey Popravka <a.popravka@smartweb.com.ua> Alexey Popravka <a.popravka@smartweb.com.ua>
Alexey Popravka <alexey.popravka@horsedevel.com> Alexey Popravka <alexey.popravka@horsedevel.com>
Alli <alzeih@users.noreply.github.com> Alli <alzeih@users.noreply.github.com>
Ami Fischman <ami@fischman.org>
Anatoly Techtonik <techtonik@gmail.com> Anatoly Techtonik <techtonik@gmail.com>
Andrei Geacar <andrei.geacar@gmail.com> Andrei Geacar <andrei.geacar@gmail.com>
Andrew Gaul <andrew@gaul.org> Andrew Gaul <andrew@gaul.org>
@ -49,6 +53,7 @@ Ben Darnell <ben@bendarnell.com>
Ben Hoyt <benhoyt@gmail.com> Ben Hoyt <benhoyt@gmail.com>
Ben Rosser <rosser.bjr@gmail.com> Ben Rosser <rosser.bjr@gmail.com>
Bence Nagy <bence@underyx.me> Bence Nagy <bence@underyx.me>
Benjamin Peterson <benjamin@python.org>
Benjamin VanEvery <ben@simondata.com> Benjamin VanEvery <ben@simondata.com>
Benoit Pierre <benoit.pierre@gmail.com> Benoit Pierre <benoit.pierre@gmail.com>
Berker Peksag <berker.peksag@gmail.com> Berker Peksag <berker.peksag@gmail.com>
@ -59,6 +64,8 @@ Brad Erickson <eosrei@gmail.com>
Bradley Ayers <bradley.ayers@gmail.com> Bradley Ayers <bradley.ayers@gmail.com>
Brandon L. Reiss <brandon@damyata.co> Brandon L. Reiss <brandon@damyata.co>
Brett Randall <javabrett@gmail.com> Brett Randall <javabrett@gmail.com>
Brian Cristante <33549821+brcrista@users.noreply.github.com>
Brian Cristante <brcrista@microsoft.com>
Brian Rosner <brosner@gmail.com> Brian Rosner <brosner@gmail.com>
BrownTruck <BrownTruck@users.noreply.github.com> BrownTruck <BrownTruck@users.noreply.github.com>
Bruno Oliveira <nicoddemus@gmail.com> Bruno Oliveira <nicoddemus@gmail.com>
@ -75,6 +82,7 @@ Carol Willing <carolcode@willingconsulting.com>
Carter Thayer <carterwthayer@gmail.com> Carter Thayer <carterwthayer@gmail.com>
Cass <cass.petrus@gmail.com> Cass <cass.petrus@gmail.com>
Chandrasekhar Atina <chandu.atina@gmail.com> Chandrasekhar Atina <chandu.atina@gmail.com>
Chih-Hsuan Yen <yen@chyen.cc>
Chris Brinker <chris.brinker@gmail.com> Chris Brinker <chris.brinker@gmail.com>
Chris Jerdonek <chris.jerdonek@gmail.com> Chris Jerdonek <chris.jerdonek@gmail.com>
Chris McDonough <chrism@plope.com> Chris McDonough <chrism@plope.com>
@ -133,9 +141,11 @@ Ed Morley <501702+edmorley@users.noreply.github.com>
Ed Morley <emorley@mozilla.com> Ed Morley <emorley@mozilla.com>
elainechan <elaine.chan@outlook.com> elainechan <elaine.chan@outlook.com>
Eli Schwartz <eschwartz93@gmail.com> Eli Schwartz <eschwartz93@gmail.com>
Eli Schwartz <eschwartz@archlinux.org>
Emil Styrke <emil.styrke@gmail.com> Emil Styrke <emil.styrke@gmail.com>
Endoh Takanao <djmchl@gmail.com> Endoh Takanao <djmchl@gmail.com>
enoch <lanxenet@gmail.com> enoch <lanxenet@gmail.com>
Erdinc Mutlu <erdinc_mutlu@yahoo.com>
Eric Gillingham <Gillingham@bikezen.net> Eric Gillingham <Gillingham@bikezen.net>
Eric Hanchrow <eric.hanchrow@gmail.com> Eric Hanchrow <eric.hanchrow@gmail.com>
Eric Hopper <hopper@omnifarious.org> Eric Hopper <hopper@omnifarious.org>
@ -149,6 +159,7 @@ Felix Yan <felixonmars@archlinux.org>
fiber-space <fiber-space@users.noreply.github.com> fiber-space <fiber-space@users.noreply.github.com>
Filip Kokosiński <filip.kokosinski@gmail.com> Filip Kokosiński <filip.kokosinski@gmail.com>
Florian Briand <ownerfrance+github@hotmail.com> Florian Briand <ownerfrance+github@hotmail.com>
Florian Rathgeber <florian.rathgeber@gmail.com>
Francesco <f.guerrieri@gmail.com> Francesco <f.guerrieri@gmail.com>
Francesco Montesano <franz.bergesund@gmail.com> Francesco Montesano <franz.bergesund@gmail.com>
Gabriel Curio <g.curio@gmail.com> Gabriel Curio <g.curio@gmail.com>
@ -253,6 +264,8 @@ Marc Tamlyn <marc.tamlyn@gmail.com>
Marcus Smith <qwcode@gmail.com> Marcus Smith <qwcode@gmail.com>
Mariatta <Mariatta@users.noreply.github.com> Mariatta <Mariatta@users.noreply.github.com>
Mark Kohler <mark.kohler@proteinsimple.com> Mark Kohler <mark.kohler@proteinsimple.com>
Mark Williams <markrwilliams@gmail.com>
Mark Williams <mrw@enotuniq.org>
Markus Hametner <fin+github@xbhd.org> Markus Hametner <fin+github@xbhd.org>
Masklinn <bitbucket.org@masklinn.net> Masklinn <bitbucket.org@masklinn.net>
Matej Stuchlik <mstuchli@redhat.com> Matej Stuchlik <mstuchli@redhat.com>
@ -271,6 +284,7 @@ Matthias Bussonnier <bussonniermatthias@gmail.com>
mattip <matti.picus@gmail.com> mattip <matti.picus@gmail.com>
Maxim Kurnikov <maxim.kurnikov@gmail.com> Maxim Kurnikov <maxim.kurnikov@gmail.com>
Maxime Rouyrre <rouyrre+git@gmail.com> Maxime Rouyrre <rouyrre+git@gmail.com>
mbaluna <44498973+mbaluna@users.noreply.github.com>
memoselyk <memoselyk@gmail.com> memoselyk <memoselyk@gmail.com>
Michael <michael-k@users.noreply.github.com> Michael <michael-k@users.noreply.github.com>
Michael Aquilina <michaelaquilina@gmail.com> Michael Aquilina <michaelaquilina@gmail.com>
@ -284,6 +298,7 @@ Mihir Singh <git.service@mihirsingh.com>
Min RK <benjaminrk@gmail.com> Min RK <benjaminrk@gmail.com>
MinRK <benjaminrk@gmail.com> MinRK <benjaminrk@gmail.com>
Miro Hrončok <miro@hroncok.cz> Miro Hrončok <miro@hroncok.cz>
Monica Baluna <mbaluna@bloomberg.net>
montefra <franz.bergesund@gmail.com> montefra <franz.bergesund@gmail.com>
Monty Taylor <mordred@inaugust.com> Monty Taylor <mordred@inaugust.com>
Nate Coraor <nate@bx.psu.edu> Nate Coraor <nate@bx.psu.edu>
@ -318,6 +333,7 @@ Paulus Schoutsen <paulus@paulusschoutsen.nl>
Pavithra Eswaramoorthy <33131404+QueenCoffee@users.noreply.github.com> Pavithra Eswaramoorthy <33131404+QueenCoffee@users.noreply.github.com>
Pawel Jasinski <pawel.jasinski@gmail.com> Pawel Jasinski <pawel.jasinski@gmail.com>
Pekka Klärck <peke@iki.fi> Pekka Klärck <peke@iki.fi>
Peter Lisák <peter.lisak@showmax.com>
Peter Waller <peter.waller@gmail.com> Peter Waller <peter.waller@gmail.com>
petr-tik <petr-tik@users.noreply.github.com> petr-tik <petr-tik@users.noreply.github.com>
Phaneendra Chiruvella <hi@pcx.io> Phaneendra Chiruvella <hi@pcx.io>
@ -329,6 +345,7 @@ Philippe Ombredanne <pombredanne@gmail.com>
Pi Delport <pjdelport@gmail.com> Pi Delport <pjdelport@gmail.com>
Pierre-Yves Rofes <github@rofes.fr> Pierre-Yves Rofes <github@rofes.fr>
pip <pypa-dev@googlegroups.com> pip <pypa-dev@googlegroups.com>
Prabhjyotsing Surjit Singh Sodhi <psinghsodhi@bloomberg.net>
Pradyun Gedam <pradyunsg@gmail.com> Pradyun Gedam <pradyunsg@gmail.com>
Pratik Mallya <mallya@us.ibm.com> Pratik Mallya <mallya@us.ibm.com>
Preston Holmes <preston@ptone.com> Preston Holmes <preston@ptone.com>
@ -379,6 +396,7 @@ Stephan Erb <github@stephanerb.eu>
stepshal <nessento@openmailbox.org> stepshal <nessento@openmailbox.org>
Steve (Gadget) Barnes <gadgetsteve@hotmail.com> Steve (Gadget) Barnes <gadgetsteve@hotmail.com>
Steve Barnes <gadgetsteve@hotmail.com> Steve Barnes <gadgetsteve@hotmail.com>
Steve Dower <steve.dower@microsoft.com>
Steve Kowalik <steven@wedontsleep.org> Steve Kowalik <steven@wedontsleep.org>
Steven Myint <git@stevenmyint.com> Steven Myint <git@stevenmyint.com>
stonebig <stonebig34@gmail.com> stonebig <stonebig34@gmail.com>
@ -418,11 +436,13 @@ W. Trevor King <wking@drexel.edu>
Wil Tan <wil@dready.org> Wil Tan <wil@dready.org>
Wilfred Hughes <me@wilfred.me.uk> Wilfred Hughes <me@wilfred.me.uk>
William ML Leslie <william.leslie.ttg@gmail.com> William ML Leslie <william.leslie.ttg@gmail.com>
William T Olson <trevor@heytrevor.com>
wim glenn <wim.glenn@gmail.com> wim glenn <wim.glenn@gmail.com>
Wolfgang Maier <wolfgang.maier@biologie.uni-freiburg.de> Wolfgang Maier <wolfgang.maier@biologie.uni-freiburg.de>
Xavier Fernandez <xav.fernandez@gmail.com> Xavier Fernandez <xav.fernandez@gmail.com>
Xavier Fernandez <xavier.fernandez@polyconseil.fr> Xavier Fernandez <xavier.fernandez@polyconseil.fr>
xoviat <xoviat@users.noreply.github.com> xoviat <xoviat@users.noreply.github.com>
xtreak <tir.karthi@gmail.com>
YAMAMOTO Takashi <yamamoto@midokura.com> YAMAMOTO Takashi <yamamoto@midokura.com>
Yen Chi Hsuan <yan12125@gmail.com> Yen Chi Hsuan <yan12125@gmail.com>
Yoval P <yoval@gmx.com> Yoval P <yoval@gmx.com>

View file

@ -1,4 +1,4 @@
Copyright (c) 2008-2018 The pip developers (see AUTHORS.txt file) Copyright (c) 2008-2019 The pip developers (see AUTHORS.txt file)
Permission is hereby granted, free of charge, to any person obtaining Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the a copy of this software and associated documentation files (the

View file

@ -7,6 +7,89 @@
.. towncrier release notes start .. towncrier release notes start
19.0.1 (2019-01-23)
===================
Bug Fixes
---------
- Fix a crash when using --no-cache-dir with PEP 517 distributions (`#6158 <https://github.com/pypa/pip/issues/6158>`_, `#6171 <https://github.com/pypa/pip/issues/6171>`_)
19.0 (2019-01-22)
=================
Deprecations and Removals
-------------------------
- Deprecate support for Python 3.4 (`#6106 <https://github.com/pypa/pip/issues/6106>`_)
- Start printing a warning for Python 2.7 to warn of impending Python 2.7 End-of-life and
prompt users to start migrating to Python 3. (`#6148 <https://github.com/pypa/pip/issues/6148>`_)
- Remove the deprecated ``--process-dependency-links`` option. (`#6060 <https://github.com/pypa/pip/issues/6060>`_)
- Remove the deprecated SVN editable detection based on dependency links
during freeze. (`#5866 <https://github.com/pypa/pip/issues/5866>`_)
Features
--------
- Implement PEP 517 (allow projects to specify a build backend via pyproject.toml). (`#5743 <https://github.com/pypa/pip/issues/5743>`_)
- Implement manylinux2010 platform tag support. manylinux2010 is the successor
to manylinux1. It allows carefully compiled binary wheels to be installed
on compatible Linux platforms. (`#5008 <https://github.com/pypa/pip/issues/5008>`_)
- Improve build isolation: handle ``.pth`` files, so namespace packages are correctly supported under Python 3.2 and earlier. (`#5656 <https://github.com/pypa/pip/issues/5656>`_)
- Include the package name in a freeze warning if the package is not installed. (`#5943 <https://github.com/pypa/pip/issues/5943>`_)
- Warn when dropping an ``--[extra-]index-url`` value that points to an existing local directory. (`#5827 <https://github.com/pypa/pip/issues/5827>`_)
- Prefix pip's ``--log`` file lines with their timestamp. (`#6141 <https://github.com/pypa/pip/issues/6141>`_)
Bug Fixes
---------
- Avoid creating excessively long temporary paths when uninstalling packages. (`#3055 <https://github.com/pypa/pip/issues/3055>`_)
- Redact the password from the URL in various log messages. (`#4746 <https://github.com/pypa/pip/issues/4746>`_, `#6124 <https://github.com/pypa/pip/issues/6124>`_)
- Avoid creating excessively long temporary paths when uninstalling packages. (`#3055 <https://github.com/pypa/pip/issues/3055>`_)
- Avoid printing a stack trace when given an invalid requirement. (`#5147 <https://github.com/pypa/pip/issues/5147>`_)
- Present 401 warning if username/password do not work for URL (`#4833 <https://github.com/pypa/pip/issues/4833>`_)
- Handle ``requests.exceptions.RetryError`` raised in ``PackageFinder`` that was causing pip to fail silently when some indexes were unreachable. (`#5270 <https://github.com/pypa/pip/issues/5270>`_, `#5483 <https://github.com/pypa/pip/issues/5483>`_)
- Handle a broken stdout pipe more gracefully (e.g. when running ``pip list | head``). (`#4170 <https://github.com/pypa/pip/issues/4170>`_)
- Fix crash from setting ``PIP_NO_CACHE_DIR=yes``. (`#5385 <https://github.com/pypa/pip/issues/5385>`_)
- Fix crash from unparseable requirements when checking installed packages. (`#5839 <https://github.com/pypa/pip/issues/5839>`_)
- Fix content type detection if a directory named like an archive is used as a package source. (`#5838 <https://github.com/pypa/pip/issues/5838>`_)
- Fix listing of outdated packages that are not dependencies of installed packages in ``pip list --outdated --not-required`` (`#5737 <https://github.com/pypa/pip/issues/5737>`_)
- Fix sorting ``TypeError`` in ``move_wheel_files()`` when installing some packages. (`#5868 <https://github.com/pypa/pip/issues/5868>`_)
- Fix support for invoking pip using ``python src/pip ...``. (`#5841 <https://github.com/pypa/pip/issues/5841>`_)
- Greatly reduce memory usage when installing wheels containing large files. (`#5848 <https://github.com/pypa/pip/issues/5848>`_)
- Editable non-VCS installs now freeze as editable. (`#5031 <https://github.com/pypa/pip/issues/5031>`_)
- Editable Git installs without a remote now freeze as editable. (`#4759 <https://github.com/pypa/pip/issues/4759>`_)
- Canonicalize sdist file names so they can be matched to a canonicalized package name passed to ``pip install``. (`#5870 <https://github.com/pypa/pip/issues/5870>`_)
- Properly decode special characters in SVN URL credentials. (`#5968 <https://github.com/pypa/pip/issues/5968>`_)
- Make ``PIP_NO_CACHE_DIR`` disable the cache also for truthy values like ``"true"``, ``"yes"``, ``"1"``, etc. (`#5735 <https://github.com/pypa/pip/issues/5735>`_)
Vendored Libraries
------------------
- Include license text of vendored 3rd party libraries. (`#5213 <https://github.com/pypa/pip/issues/5213>`_)
- Update certifi to 2018.11.29
- Update colorama to 0.4.1
- Update distlib to 0.2.8
- Update idna to 2.8
- Update packaging to 19.0
- Update pep517 to 0.5.0
- Update pkg_resources to 40.6.3 (via setuptools)
- Update pyparsing to 2.3.1
- Update pytoml to 0.1.20
- Update requests to 2.21.0
- Update six to 1.12.0
- Update urllib3 to 1.24.1
Improved Documentation
----------------------
- Include the Vendoring Policy in the documentation. (`#5958 <https://github.com/pypa/pip/issues/5958>`_)
- Add instructions for running pip from source to Development documentation. (`#5949 <https://github.com/pypa/pip/issues/5949>`_)
- Remove references to removed ``#egg=<name>-<version>`` functionality (`#5888 <https://github.com/pypa/pip/issues/5888>`_)
- Fix omission of command name in HTML usage documentation (`#5984 <https://github.com/pypa/pip/issues/5984>`_)
18.1 (2018-10-05) 18.1 (2018-10-05)
================= =================

View file

@ -1 +0,0 @@
Include the package name in a freeze warning if the package is not installed.

View file

@ -1 +0,0 @@
Avoids creating excessively long temporary paths when uninstalling packages.

View file

@ -1 +0,0 @@
Redact the password from the URL in various log messages.

View file

@ -1 +0,0 @@
give 401 warning if username/password do not work for URL

View file

@ -1,3 +0,0 @@
Implement manylinux2010 platform tag support. manylinux2010 is the successor
to manylinux1. It allows carefully compiled binary wheels to be installed
on compatible Linux platforms.

View file

@ -1 +0,0 @@
Editable, non-VCS installs now freeze as editable.

View file

@ -1 +0,0 @@
Invalid requirement no longer causes stack trace to be printed.

View file

@ -1 +0,0 @@
Pip now includes license text of 3rd party libraries.

View file

@ -1,2 +0,0 @@
Handle `requests.exceptions.RetryError` raised in `PackageFinder` that was
causing pip to fail silently when some indexes were unreachable.

View file

@ -1 +0,0 @@
Setting ``PIP_NO_CACHE_DIR=yes`` no longer causes pip to crash.

View file

@ -1,2 +0,0 @@
Handle `requests.exceptions.RetryError` raised in `PackageFinder` that was
causing pip to fail silently when some indexes were unreachable.

View file

@ -1 +0,0 @@
- Improve PEP 518 build isolation: handle .pth files, so namespace packages are correctly supported under Python 3.2 and earlier.

View file

@ -1,2 +0,0 @@
Make ``PIP_NO_CACHE_DIR`` disable the cache also for truthy values like
``"true"``, ``"yes"``, ``"1"``, etc.

View file

@ -1 +0,0 @@
Implement PEP 517 (allow projects to specify a build backend via pyproject.toml).

View file

@ -1 +0,0 @@
A warning message is emitted when dropping an ``--[extra-]index-url`` value that points to an existing local directory.

View file

@ -1 +0,0 @@
Fix content type detection if a directory named like an archive is used as a package source.

View file

@ -1 +0,0 @@
Fix crashes from unparseable requirements when checking installed packages.

View file

@ -1 +0,0 @@
Fix support for invoking pip using `python src/pip ...`.

View file

@ -1 +0,0 @@
Greatly reduce memory usage when installing wheels containing large files.

View file

@ -1,2 +0,0 @@
Remove the deprecated SVN editable detection based on dependency links
during freeze.

View file

@ -1 +0,0 @@
Fix sorting `TypeError` in `move_wheel_files()` when installing some packages.

View file

@ -1 +0,0 @@
Canonicalize sdist file names so they can be matched to a canonicalized package name passed to ``pip install``.

View file

@ -1 +0,0 @@
Remove references to removed #egg=<name>-<version> functionality

View file

@ -1 +0,0 @@
Adds instructions for running pip from source to Development documentation.

View file

@ -1 +0,0 @@
Include the Vendoring Policy in the documentation.

View file

@ -1,2 +0,0 @@
Adds hyperlinks to User IRC and Dev IRC in README.

View file

@ -1 +0,0 @@
Percent-decode special characters in SVN URL credentials.

View file

@ -1 +0,0 @@
Add command information in usage document for pip cmd

1
news/6165.bugfix Normal file
View file

@ -0,0 +1 @@
Allow ``RECORD`` lines with more than three elements, and display a warning.

View file

@ -1 +0,0 @@

View file

@ -1 +1 @@
__version__ = "19.0.dev0" __version__ = "19.1.dev0"

View file

@ -189,8 +189,6 @@ class BuildEnvironment(object):
args.extend(['--trusted-host', host]) args.extend(['--trusted-host', host])
if finder.allow_all_prereleases: if finder.allow_all_prereleases:
args.append('--pre') args.append('--pre')
if finder.process_dependency_links:
args.append('--process-dependency-links')
args.append('--') args.append('--')
args.extend(requirements) args.extend(requirements)
with open_spinner(message) as spinner: with open_spinner(message) as spinner:

View file

@ -1,11 +1,12 @@
"""Base Command class, and related routines""" """Base Command class, and related routines"""
from __future__ import absolute_import from __future__ import absolute_import, print_function
import logging import logging
import logging.config import logging.config
import optparse import optparse
import os import os
import sys import sys
import traceback
from pip._internal.cli import cmdoptions from pip._internal.cli import cmdoptions
from pip._internal.cli.parser import ( from pip._internal.cli.parser import (
@ -26,8 +27,11 @@ from pip._internal.req.constructors import (
install_req_from_editable, install_req_from_line, install_req_from_editable, install_req_from_line,
) )
from pip._internal.req.req_file import parse_requirements from pip._internal.req.req_file import parse_requirements
from pip._internal.utils.logging import setup_logging from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.misc import get_prog, normalize_path from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging
from pip._internal.utils.misc import (
get_prog, normalize_path, redact_password_from_url,
)
from pip._internal.utils.outdated import pip_version_check from pip._internal.utils.outdated import pip_version_check
from pip._internal.utils.typing import MYPY_CHECK_RUNNING from pip._internal.utils.typing import MYPY_CHECK_RUNNING
@ -126,12 +130,30 @@ class Command(object):
# Set verbosity so that it can be used elsewhere. # Set verbosity so that it can be used elsewhere.
self.verbosity = options.verbose - options.quiet self.verbosity = options.verbose - options.quiet
setup_logging( level_number = setup_logging(
verbosity=self.verbosity, verbosity=self.verbosity,
no_color=options.no_color, no_color=options.no_color,
user_log_file=options.log, user_log_file=options.log,
) )
if sys.version_info[:2] == (3, 4):
deprecated(
"Python 3.4 support has been deprecated. pip 19.1 will be the "
"last one supporting it. Please upgrade your Python as Python "
"3.4 won't be maintained after March 2019 (cf PEP 429).",
replacement=None,
gone_in='19.2',
)
elif sys.version_info[:2] == (2, 7):
deprecated(
"Python 2.7 will reach the end of its life on January 1st, "
"2020. Please upgrade your Python as Python 2.7 won't be "
"maintained after that date. A future version of pip will "
"drop support for Python 2.7.",
replacement=None,
gone_in=None,
)
# TODO: Try to get these passing down from the command? # TODO: Try to get these passing down from the command?
# without resorting to os.environ to hold these. # without resorting to os.environ to hold these.
# This also affects isolated builds and it should. # This also affects isolated builds and it should.
@ -170,6 +192,14 @@ class Command(object):
logger.critical('ERROR: %s', exc) logger.critical('ERROR: %s', exc)
logger.debug('Exception information:', exc_info=True) logger.debug('Exception information:', exc_info=True)
return ERROR
except BrokenStdoutLoggingError:
# Bypass our logger and write any remaining messages to stderr
# because stdout no longer works.
print('ERROR: Pipe to stdout was broken', file=sys.stderr)
if level_number <= logging.DEBUG:
traceback.print_exc(file=sys.stderr)
return ERROR return ERROR
except KeyboardInterrupt: except KeyboardInterrupt:
logger.critical('Operation cancelled by user') logger.critical('Operation cancelled by user')
@ -287,7 +317,10 @@ class RequirementCommand(Command):
""" """
index_urls = [options.index_url] + options.extra_index_urls index_urls = [options.index_url] + options.extra_index_urls
if options.no_index: if options.no_index:
logger.debug('Ignoring indexes: %s', ','.join(index_urls)) logger.debug(
'Ignoring indexes: %s',
','.join(redact_password_from_url(url) for url in index_urls),
)
index_urls = [] index_urls = []
return PackageFinder( return PackageFinder(
@ -296,7 +329,6 @@ class RequirementCommand(Command):
index_urls=index_urls, index_urls=index_urls,
trusted_hosts=options.trusted_hosts, trusted_hosts=options.trusted_hosts,
allow_all_prereleases=options.pre, allow_all_prereleases=options.pre,
process_dependency_links=options.process_dependency_links,
session=session, session=session,
platform=platform, platform=platform,
versions=python_versions, versions=python_versions,

View file

@ -9,6 +9,7 @@ pass on state. To be consistent, all options will follow this design.
""" """
from __future__ import absolute_import from __future__ import absolute_import
import textwrap
import warnings import warnings
from distutils.util import strtobool from distutils.util import strtobool
from functools import partial from functools import partial
@ -28,6 +29,20 @@ if MYPY_CHECK_RUNNING:
from pip._internal.cli.parser import ConfigOptionParser # noqa: F401 from pip._internal.cli.parser import ConfigOptionParser # noqa: F401
def raise_option_error(parser, option, msg):
"""
Raise an option parsing error using parser.error().
Args:
parser: an OptionParser instance.
option: an Option instance.
msg: the error text.
"""
msg = '{} error: {}'.format(option, msg)
msg = textwrap.fill(' '.join(msg.split()))
parser.error(msg)
def make_option_group(group, parser): def make_option_group(group, parser):
# type: (Dict[str, Any], ConfigOptionParser) -> OptionGroup # type: (Dict[str, Any], ConfigOptionParser) -> OptionGroup
""" """
@ -347,17 +362,6 @@ def trusted_host():
) )
# Remove after 1.5
process_dependency_links = partial(
Option,
"--process-dependency-links",
dest="process_dependency_links",
action="store_true",
default=False,
help="Enable the processing of dependency links.",
) # type: Callable[..., Option]
def constraints(): def constraints():
# type: () -> Option # type: () -> Option
return Option( return Option(
@ -551,7 +555,10 @@ def no_cache_dir_callback(option, opt, value, parser):
# environment variable, like PIP_NO_CACHE_DIR=true. # environment variable, like PIP_NO_CACHE_DIR=true.
if value is not None: if value is not None:
# Then parse the string value to get argument error-checking. # Then parse the string value to get argument error-checking.
strtobool(value) try:
strtobool(value)
except ValueError as exc:
raise_option_error(parser, option=option, msg=str(exc))
# Originally, setting PIP_NO_CACHE_DIR to a value that strtobool() # Originally, setting PIP_NO_CACHE_DIR to a value that strtobool()
# converted to 0 (like "false" or "no") caused cache_dir to be disabled # converted to 0 (like "false" or "no") caused cache_dir to be disabled
@ -612,6 +619,30 @@ no_build_isolation = partial(
'if this option is used.' 'if this option is used.'
) # type: Callable[..., Option] ) # type: Callable[..., Option]
def no_use_pep517_callback(option, opt, value, parser):
"""
Process a value provided for the --no-use-pep517 option.
This is an optparse.Option callback for the no_use_pep517 option.
"""
# Since --no-use-pep517 doesn't accept arguments, the value argument
# will be None if --no-use-pep517 is passed via the command-line.
# However, the value can be non-None if the option is triggered e.g.
# by an environment variable, for example "PIP_NO_USE_PEP517=true".
if value is not None:
msg = """A value was passed for --no-use-pep517,
probably using either the PIP_NO_USE_PEP517 environment variable
or the "no-use-pep517" config file option. Use an appropriate value
of the PIP_USE_PEP517 environment variable or the "use-pep517"
config file option instead.
"""
raise_option_error(parser, option=option, msg=msg)
# Otherwise, --no-use-pep517 was passed via the command-line.
parser.values.use_pep517 = False
use_pep517 = partial( use_pep517 = partial(
Option, Option,
'--use-pep517', '--use-pep517',
@ -626,7 +657,8 @@ no_use_pep517 = partial(
Option, Option,
'--no-use-pep517', '--no-use-pep517',
dest='use_pep517', dest='use_pep517',
action='store_false', action='callback',
callback=no_use_pep517_callback,
default=None, default=None,
help=SUPPRESS_HELP help=SUPPRESS_HELP
) # type: Any ) # type: Any
@ -773,6 +805,5 @@ index_group = {
extra_index_url, extra_index_url,
no_index, no_index,
find_links, find_links,
process_dependency_links,
] ]
} # type: Dict[str, Any] } # type: Dict[str, Any]

View file

@ -118,7 +118,6 @@ class ListCommand(Command):
index_urls=index_urls, index_urls=index_urls,
allow_all_prereleases=options.pre, allow_all_prereleases=options.pre,
trusted_hosts=options.trusted_hosts, trusted_hosts=options.trusted_hosts,
process_dependency_links=options.process_dependency_links,
session=session, session=session,
) )
@ -134,14 +133,18 @@ class ListCommand(Command):
include_editables=options.include_editable, include_editables=options.include_editable,
) )
# get_not_required must be called firstly in order to find and
# filter out all dependencies correctly. Otherwise a package
# can't be identified as requirement because some parent packages
# could be filtered out before.
if options.not_required:
packages = self.get_not_required(packages, options)
if options.outdated: if options.outdated:
packages = self.get_outdated(packages, options) packages = self.get_outdated(packages, options)
elif options.uptodate: elif options.uptodate:
packages = self.get_uptodate(packages, options) packages = self.get_uptodate(packages, options)
if options.not_required:
packages = self.get_not_required(packages, options)
self.output_package_listing(packages, options) self.output_package_listing(packages, options)
def get_outdated(self, packages, options): def get_outdated(self, packages, options):
@ -168,16 +171,8 @@ class ListCommand(Command):
logger.debug('Ignoring indexes: %s', ','.join(index_urls)) logger.debug('Ignoring indexes: %s', ','.join(index_urls))
index_urls = [] index_urls = []
dependency_links = []
for dist in packages:
if dist.has_metadata('dependency_links.txt'):
dependency_links.extend(
dist.get_metadata_lines('dependency_links.txt'),
)
with self._build_session(options) as session: with self._build_session(options) as session:
finder = self._build_package_finder(options, index_urls, session) finder = self._build_package_finder(options, index_urls, session)
finder.add_dependency_links(dependency_links)
for dist in packages: for dist in packages:
typ = 'unknown' typ = 'unknown'

View file

@ -31,7 +31,6 @@ from pip._internal.models.index import PyPI
from pip._internal.models.link import Link from pip._internal.models.link import Link
from pip._internal.pep425tags import get_supported from pip._internal.pep425tags import get_supported
from pip._internal.utils.compat import ipaddress from pip._internal.utils.compat import ipaddress
from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.logging import indent_log from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import ( from pip._internal.utils.misc import (
ARCHIVE_EXTENSIONS, SUPPORTED_EXTENSIONS, WHEEL_EXTENSION, normalize_path, ARCHIVE_EXTENSIONS, SUPPORTED_EXTENSIONS, WHEEL_EXTENSION, normalize_path,
@ -268,7 +267,6 @@ class PackageFinder(object):
index_urls, # type: List[str] index_urls, # type: List[str]
allow_all_prereleases=False, # type: bool allow_all_prereleases=False, # type: bool
trusted_hosts=None, # type: Optional[Iterable[str]] trusted_hosts=None, # type: Optional[Iterable[str]]
process_dependency_links=False, # type: bool
session=None, # type: Optional[PipSession] session=None, # type: Optional[PipSession]
format_control=None, # type: Optional[FormatControl] format_control=None, # type: Optional[FormatControl]
platform=None, # type: Optional[str] platform=None, # type: Optional[str]
@ -315,7 +313,6 @@ class PackageFinder(object):
self.find_links.append(link) self.find_links.append(link)
self.index_urls = index_urls self.index_urls = index_urls
self.dependency_links = [] # type: List[str]
# These are boring links that have already been logged somehow: # These are boring links that have already been logged somehow:
self.logged_links = set() # type: Set[Link] self.logged_links = set() # type: Set[Link]
@ -331,9 +328,6 @@ class PackageFinder(object):
# Do we want to allow _all_ pre-releases? # Do we want to allow _all_ pre-releases?
self.allow_all_prereleases = allow_all_prereleases self.allow_all_prereleases = allow_all_prereleases
# Do we process dependency links?
self.process_dependency_links = process_dependency_links
# The Session we'll use to make requests # The Session we'll use to make requests
self.session = session self.session = session
@ -375,22 +369,6 @@ class PackageFinder(object):
) )
return "\n".join(lines) return "\n".join(lines)
def add_dependency_links(self, links):
# type: (Iterable[str]) -> None
# FIXME: this shouldn't be global list this, it should only
# apply to requirements of the package that specifies the
# dependency_links value
# FIXME: also, we should track comes_from (i.e., use Link)
if self.process_dependency_links:
deprecated(
"Dependency Links processing has been deprecated and will be "
"removed in a future release.",
replacement="PEP 508 URL dependencies",
gone_in="19.0",
issue=4187,
)
self.dependency_links.extend(links)
@staticmethod @staticmethod
def _sort_locations(locations, expand_dir=False): def _sort_locations(locations, expand_dir=False):
# type: (Sequence[str], bool) -> Tuple[List[str], List[str]] # type: (Sequence[str], bool) -> Tuple[List[str], List[str]]
@ -587,7 +565,7 @@ class PackageFinder(object):
# type: (str) -> List[Optional[InstallationCandidate]] # type: (str) -> List[Optional[InstallationCandidate]]
"""Find all available InstallationCandidate for project_name """Find all available InstallationCandidate for project_name
This checks index_urls, find_links and dependency_links. This checks index_urls and find_links.
All versions found are returned as an InstallationCandidate list. All versions found are returned as an InstallationCandidate list.
See _link_package_versions for details on which files are accepted See _link_package_versions for details on which files are accepted
@ -597,21 +575,18 @@ class PackageFinder(object):
fl_file_loc, fl_url_loc = self._sort_locations( fl_file_loc, fl_url_loc = self._sort_locations(
self.find_links, expand_dir=True, self.find_links, expand_dir=True,
) )
dep_file_loc, dep_url_loc = self._sort_locations(self.dependency_links)
file_locations = (Link(url) for url in itertools.chain( file_locations = (Link(url) for url in itertools.chain(
index_file_loc, fl_file_loc, dep_file_loc, index_file_loc, fl_file_loc,
)) ))
# We trust every url that the user has given us whether it was given # We trust every url that the user has given us whether it was given
# via --index-url or --find-links # via --index-url or --find-links.
# We explicitly do not trust links that came from dependency_links
# We want to filter out any thing which does not have a secure origin. # We want to filter out any thing which does not have a secure origin.
url_locations = [ url_locations = [
link for link in itertools.chain( link for link in itertools.chain(
(Link(url) for url in index_url_loc), (Link(url) for url in index_url_loc),
(Link(url) for url in fl_url_loc), (Link(url) for url in fl_url_loc),
(Link(url) for url in dep_url_loc),
) )
if self._validate_secure_origin(logger, link) if self._validate_secure_origin(logger, link)
] ]
@ -639,17 +614,6 @@ class PackageFinder(object):
self._package_versions(page.iter_links(), search) self._package_versions(page.iter_links(), search)
) )
dependency_versions = self._package_versions(
(Link(url) for url in self.dependency_links), search
)
if dependency_versions:
logger.debug(
'dependency_links found: %s',
', '.join([
version.location.url for version in dependency_versions
])
)
file_versions = self._package_versions(file_locations, search) file_versions = self._package_versions(file_locations, search)
if file_versions: if file_versions:
file_versions.sort(reverse=True) file_versions.sort(reverse=True)
@ -662,10 +626,7 @@ class PackageFinder(object):
) )
# This is an intentional priority ordering # This is an intentional priority ordering
return ( return file_versions + find_links_versions + page_versions
file_versions + find_links_versions + page_versions +
dependency_versions
)
def find_requirement(self, req, upgrade): def find_requirement(self, req, upgrade):
# type: (InstallRequirement, bool) -> Optional[Link] # type: (InstallRequirement, bool) -> Optional[Link]

View file

@ -120,9 +120,6 @@ def check_install_conflicts(to_install):
) )
# NOTE from @pradyunsg
# This required a minor update in dependency link handling logic over at
# operations.prepare.IsSDist.dist() to get it working
def _simulate_installation_of(to_install, package_set): def _simulate_installation_of(to_install, package_set):
# type: (List[InstallRequirement], PackageSet) -> Set[str] # type: (List[InstallRequirement], PackageSet) -> Set[str]
"""Computes the version of packages after installing to_install. """Computes the version of packages after installing to_install.
@ -133,7 +130,7 @@ def _simulate_installation_of(to_install, package_set):
# Modify it as installing requirement_set would (assuming no errors) # Modify it as installing requirement_set would (assuming no errors)
for inst_req in to_install: for inst_req in to_install:
dist = make_abstract_dist(inst_req).dist(finder=None) dist = make_abstract_dist(inst_req).dist()
name = canonicalize_name(dist.key) name = canonicalize_name(dist.key)
package_set[name] = PackageDetails(dist.version, dist.requires()) package_set[name] = PackageDetails(dist.version, dist.requires())

View file

@ -172,7 +172,7 @@ def get_requirement_info(dist):
location = os.path.normcase(os.path.abspath(dist.location)) location = os.path.normcase(os.path.abspath(dist.location))
from pip._internal.vcs import vcs from pip._internal.vcs import vcs, RemoteNotFoundError
vc_type = vcs.get_backend_type(location) vc_type = vcs.get_backend_type(location)
if not vc_type: if not vc_type:
@ -182,12 +182,21 @@ def get_requirement_info(dist):
location, location,
) )
comments = [ comments = [
'# Editable, no version control detected ({})'.format(req) '# Editable install with no version control ({})'.format(req)
] ]
return (location, True, comments) return (location, True, comments)
try: try:
req = vc_type().get_src_requirement(location, dist.project_name) req = vc_type.get_src_requirement(location, dist.project_name)
except RemoteNotFoundError:
req = dist.as_requirement()
comments = [
'# Editable {} install with no remote ({})'.format(
vc_type.__name__, req,
)
]
return (location, True, comments)
except BadCommand: except BadCommand:
logger.warning( logger.warning(
'cannot determine version of editable source in %s ' 'cannot determine version of editable source in %s '

View file

@ -71,21 +71,21 @@ class DistAbstraction(object):
# type: (InstallRequirement) -> None # type: (InstallRequirement) -> None
self.req = req # type: InstallRequirement self.req = req # type: InstallRequirement
def dist(self, finder): def dist(self):
# type: (PackageFinder) -> Any # type: () -> Any
"""Return a setuptools Dist object.""" """Return a setuptools Dist object."""
raise NotImplementedError(self.dist) raise NotImplementedError
def prep_for_dist(self, finder, build_isolation): def prep_for_dist(self, finder, build_isolation):
# type: (PackageFinder, bool) -> Any # type: (PackageFinder, bool) -> Any
"""Ensure that we can get a Dist for this requirement.""" """Ensure that we can get a Dist for this requirement."""
raise NotImplementedError(self.dist) raise NotImplementedError
class IsWheel(DistAbstraction): class IsWheel(DistAbstraction):
def dist(self, finder): def dist(self):
# type: (PackageFinder) -> pkg_resources.Distribution # type: () -> pkg_resources.Distribution
return list(pkg_resources.find_distributions( return list(pkg_resources.find_distributions(
self.req.source_dir))[0] self.req.source_dir))[0]
@ -97,15 +97,8 @@ class IsWheel(DistAbstraction):
class IsSDist(DistAbstraction): class IsSDist(DistAbstraction):
def dist(self, finder): def dist(self):
# type: (PackageFinder) -> pkg_resources.Distribution return self.req.get_dist()
dist = self.req.get_dist()
# FIXME: shouldn't be globally added.
if finder and dist.has_metadata('dependency_links.txt'):
finder.add_dependency_links(
dist.get_metadata_lines('dependency_links.txt')
)
return dist
def prep_for_dist(self, finder, build_isolation): def prep_for_dist(self, finder, build_isolation):
# type: (PackageFinder, bool) -> None # type: (PackageFinder, bool) -> None
@ -168,8 +161,8 @@ class IsSDist(DistAbstraction):
class Installed(DistAbstraction): class Installed(DistAbstraction):
def dist(self, finder): def dist(self):
# type: (PackageFinder) -> pkg_resources.Distribution # type: () -> pkg_resources.Distribution
return self.req.satisfied_by return self.req.satisfied_by
def prep_for_dist(self, finder, build_isolation): def prep_for_dist(self, finder, build_isolation):

View file

@ -57,17 +57,20 @@ def load_pyproject_toml(
build_system = None build_system = None
# The following cases must use PEP 517 # The following cases must use PEP 517
# We check for use_pep517 equalling False because that # We check for use_pep517 being non-None and falsey because that means
# means the user explicitly requested --no-use-pep517 # the user explicitly requested --no-use-pep517. The value 0 as
# opposed to False can occur when the value is provided via an
# environment variable or config file option (due to the quirk of
# strtobool() returning an integer in pip's configuration code).
if has_pyproject and not has_setup: if has_pyproject and not has_setup:
if use_pep517 is False: if use_pep517 is not None and not use_pep517:
raise InstallationError( raise InstallationError(
"Disabling PEP 517 processing is invalid: " "Disabling PEP 517 processing is invalid: "
"project does not have a setup.py" "project does not have a setup.py"
) )
use_pep517 = True use_pep517 = True
elif build_system and "build-backend" in build_system: elif build_system and "build-backend" in build_system:
if use_pep517 is False: if use_pep517 is not None and not use_pep517:
raise InstallationError( raise InstallationError(
"Disabling PEP 517 processing is invalid: " "Disabling PEP 517 processing is invalid: "
"project specifies a build backend of {} " "project specifies a build backend of {} "

View file

@ -55,7 +55,6 @@ SUPPORTED_OPTIONS = [
cmdoptions.no_binary, cmdoptions.no_binary,
cmdoptions.only_binary, cmdoptions.only_binary,
cmdoptions.pre, cmdoptions.pre,
cmdoptions.process_dependency_links,
cmdoptions.trusted_host, cmdoptions.trusted_host,
cmdoptions.require_hashes, cmdoptions.require_hashes,
] # type: List[Callable[..., optparse.Option]] ] # type: List[Callable[..., optparse.Option]]
@ -251,8 +250,6 @@ def process_line(
finder.find_links.append(value) finder.find_links.append(value)
if opts.pre: if opts.pre:
finder.allow_all_prereleases = True finder.allow_all_prereleases = True
if opts.process_dependency_links:
finder.process_dependency_links = True
if opts.trusted_hosts: if opts.trusted_hosts:
finder.secure_origins.extend( finder.secure_origins.extend(
("*", host, "*") for host in opts.trusted_hosts) ("*", host, "*") for host in opts.trusted_hosts)

View file

@ -294,7 +294,7 @@ class Resolver(object):
abstract_dist = self._get_abstract_dist_for(req_to_install) abstract_dist = self._get_abstract_dist_for(req_to_install)
# Parse and return dependencies # Parse and return dependencies
dist = abstract_dist.dist(self.finder) dist = abstract_dist.dist()
try: try:
check_dist_requires_python(dist) check_dist_requires_python(dist)
except UnsupportedPythonVersion as err: except UnsupportedPythonVersion as err:

View file

@ -1,9 +1,13 @@
from __future__ import absolute_import from __future__ import absolute_import
import contextlib import contextlib
import errno
import logging import logging
import logging.handlers import logging.handlers
import os import os
import sys
from pip._vendor.six import PY2
from pip._internal.utils.compat import WINDOWS from pip._internal.utils.compat import WINDOWS
from pip._internal.utils.misc import ensure_dir from pip._internal.utils.misc import ensure_dir
@ -26,6 +30,48 @@ _log_state = threading.local()
_log_state.indentation = 0 _log_state.indentation = 0
class BrokenStdoutLoggingError(Exception):
"""
Raised if BrokenPipeError occurs for the stdout stream while logging.
"""
pass
# BrokenPipeError does not exist in Python 2 and, in addition, manifests
# differently in Windows and non-Windows.
if WINDOWS:
# In Windows, a broken pipe can show up as EINVAL rather than EPIPE:
# https://bugs.python.org/issue19612
# https://bugs.python.org/issue30418
if PY2:
def _is_broken_pipe_error(exc_class, exc):
"""See the docstring for non-Windows Python 3 below."""
return (exc_class is IOError and
exc.errno in (errno.EINVAL, errno.EPIPE))
else:
# In Windows, a broken pipe IOError became OSError in Python 3.
def _is_broken_pipe_error(exc_class, exc):
"""See the docstring for non-Windows Python 3 below."""
return ((exc_class is BrokenPipeError) or # noqa: F821
(exc_class is OSError and
exc.errno in (errno.EINVAL, errno.EPIPE)))
elif PY2:
def _is_broken_pipe_error(exc_class, exc):
"""See the docstring for non-Windows Python 3 below."""
return (exc_class is IOError and exc.errno == errno.EPIPE)
else:
# Then we are in the non-Windows Python 3 case.
def _is_broken_pipe_error(exc_class, exc):
"""
Return whether an exception is a broken pipe error.
Args:
exc_class: an exception class.
exc: an exception instance.
"""
return (exc_class is BrokenPipeError) # noqa: F821
@contextlib.contextmanager @contextlib.contextmanager
def indent_log(num=2): def indent_log(num=2):
""" """
@ -44,15 +90,28 @@ def get_indentation():
class IndentingFormatter(logging.Formatter): class IndentingFormatter(logging.Formatter):
def __init__(self, *args, **kwargs):
"""
A logging.Formatter obeying containing indent_log contexts.
:param add_timestamp: A bool indicating output lines should be prefixed
with their record's timestamp.
"""
self.add_timestamp = kwargs.pop("add_timestamp", False)
super(IndentingFormatter, self).__init__(*args, **kwargs)
def format(self, record): def format(self, record):
""" """
Calls the standard formatter, but will indent all of the log messages Calls the standard formatter, but will indent all of the log messages
by our current indentation level. by our current indentation level.
""" """
formatted = logging.Formatter.format(self, record) formatted = super(IndentingFormatter, self).format(record)
prefix = ''
if self.add_timestamp:
prefix = self.formatTime(record, "%Y-%m-%dT%H:%M:%S ")
prefix += " " * get_indentation()
formatted = "".join([ formatted = "".join([
(" " * get_indentation()) + line prefix + line
for line in formatted.splitlines(True) for line in formatted.splitlines(True)
]) ])
return formatted return formatted
@ -83,6 +142,16 @@ class ColorizedStreamHandler(logging.StreamHandler):
if WINDOWS and colorama: if WINDOWS and colorama:
self.stream = colorama.AnsiToWin32(self.stream) self.stream = colorama.AnsiToWin32(self.stream)
def _using_stdout(self):
"""
Return whether the handler is using sys.stdout.
"""
if WINDOWS and colorama:
# Then self.stream is an AnsiToWin32 object.
return self.stream.wrapped is sys.stdout
return self.stream is sys.stdout
def should_color(self): def should_color(self):
# Don't colorize things if we do not have colorama or if told not to # Don't colorize things if we do not have colorama or if told not to
if not colorama or self._no_color: if not colorama or self._no_color:
@ -115,6 +184,19 @@ class ColorizedStreamHandler(logging.StreamHandler):
return msg return msg
# The logging module says handleError() can be customized.
def handleError(self, record):
exc_class, exc = sys.exc_info()[:2]
# If a broken pipe occurred while calling write() or flush() on the
# stdout stream in logging's Handler.emit(), then raise our special
# exception so we can handle it in main() instead of logging the
# broken pipe error and continuing.
if (exc_class and self._using_stdout() and
_is_broken_pipe_error(exc_class, exc)):
raise BrokenStdoutLoggingError()
return super(ColorizedStreamHandler, self).handleError(record)
class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler): class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler):
@ -134,6 +216,8 @@ class MaxLevelFilter(logging.Filter):
def setup_logging(verbosity, no_color, user_log_file): def setup_logging(verbosity, no_color, user_log_file):
"""Configures and sets up all of the logging """Configures and sets up all of the logging
Returns the requested logging level, as its integer value.
""" """
# Determine the level to be logging at. # Determine the level to be logging at.
@ -148,6 +232,8 @@ def setup_logging(verbosity, no_color, user_log_file):
else: else:
level = "INFO" level = "INFO"
level_number = getattr(logging, level)
# The "root" logger should match the "console" level *unless* we also need # The "root" logger should match the "console" level *unless* we also need
# to log to a user log file. # to log to a user log file.
include_user_log = user_log_file is not None include_user_log = user_log_file is not None
@ -186,6 +272,11 @@ def setup_logging(verbosity, no_color, user_log_file):
"()": IndentingFormatter, "()": IndentingFormatter,
"format": "%(message)s", "format": "%(message)s",
}, },
"indent_with_timestamp": {
"()": IndentingFormatter,
"format": "%(message)s",
"add_timestamp": True,
},
}, },
"handlers": { "handlers": {
"console": { "console": {
@ -208,7 +299,7 @@ def setup_logging(verbosity, no_color, user_log_file):
"class": handler_classes["file"], "class": handler_classes["file"],
"filename": additional_log_file, "filename": additional_log_file,
"delay": True, "delay": True,
"formatter": "indent", "formatter": "indent_with_timestamp",
}, },
}, },
"root": { "root": {
@ -223,3 +314,5 @@ def setup_logging(verbosity, no_color, user_log_file):
} }
}, },
}) })
return level_number

View file

@ -653,6 +653,7 @@ def call_subprocess(
show_stdout=True, # type: bool show_stdout=True, # type: bool
cwd=None, # type: Optional[str] cwd=None, # type: Optional[str]
on_returncode='raise', # type: str on_returncode='raise', # type: str
extra_ok_returncodes=None, # type: Optional[Iterable[int]]
command_desc=None, # type: Optional[str] command_desc=None, # type: Optional[str]
extra_environ=None, # type: Optional[Mapping[str, Any]] extra_environ=None, # type: Optional[Mapping[str, Any]]
unset_environ=None, # type: Optional[Iterable[str]] unset_environ=None, # type: Optional[Iterable[str]]
@ -661,9 +662,13 @@ def call_subprocess(
# type: (...) -> Optional[Text] # type: (...) -> Optional[Text]
""" """
Args: Args:
extra_ok_returncodes: an iterable of integer return codes that are
acceptable, in addition to 0. Defaults to None, which means [].
unset_environ: an iterable of environment variable names to unset unset_environ: an iterable of environment variable names to unset
prior to calling subprocess.Popen(). prior to calling subprocess.Popen().
""" """
if extra_ok_returncodes is None:
extra_ok_returncodes = []
if unset_environ is None: if unset_environ is None:
unset_environ = [] unset_environ = []
# This function's handling of subprocess output is confusing and I # This function's handling of subprocess output is confusing and I
@ -740,7 +745,7 @@ def call_subprocess(
spinner.finish("error") spinner.finish("error")
else: else:
spinner.finish("done") spinner.finish("done")
if proc.returncode: if proc.returncode and proc.returncode not in extra_ok_returncodes:
if on_returncode == 'raise': if on_returncode == 'raise':
if (logger.getEffectiveLevel() > std_logging.DEBUG and if (logger.getEffectiveLevel() > std_logging.DEBUG and
not show_stdout): not show_stdout):
@ -859,6 +864,13 @@ def captured_stdout():
return captured_output('stdout') return captured_output('stdout')
def captured_stderr():
"""
See captured_stdout().
"""
return captured_output('stderr')
class cached_property(object): class cached_property(object):
"""A property that is only computed once per instance and then replaces """A property that is only computed once per instance and then replaces
itself with an ordinary attribute. Deleting the attribute resets the itself with an ordinary attribute. Deleting the attribute resets the

View file

@ -127,7 +127,6 @@ def pip_version_check(session, options):
index_urls=[options.index_url] + options.extra_index_urls, index_urls=[options.index_url] + options.extra_index_urls,
allow_all_prereleases=False, # Explicitly set to False allow_all_prereleases=False, # Explicitly set to False
trusted_hosts=options.trusted_hosts, trusted_hosts=options.trusted_hosts,
process_dependency_links=options.process_dependency_links,
session=session, session=session,
) )
all_candidates = finder.find_all_candidates("pip") all_candidates = finder.find_all_candidates("pip")

View file

@ -17,7 +17,7 @@ from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING: if MYPY_CHECK_RUNNING:
from typing import ( # noqa: F401 from typing import ( # noqa: F401
Dict, Optional, Tuple, List, Type, Any, Mapping, Text Any, Dict, Iterable, List, Mapping, Optional, Text, Tuple, Type
) )
from pip._internal.utils.ui import SpinnerInterface # noqa: F401 from pip._internal.utils.ui import SpinnerInterface # noqa: F401
@ -29,6 +29,10 @@ __all__ = ['vcs']
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
class RemoteNotFoundError(Exception):
pass
class RevOptions(object): class RevOptions(object):
""" """
@ -206,7 +210,8 @@ class VersionControl(object):
""" """
return RevOptions(self, rev, extra_args=extra_args) return RevOptions(self, rev, extra_args=extra_args)
def _is_local_repository(self, repo): @classmethod
def _is_local_repository(cls, repo):
# type: (str) -> bool # type: (str) -> bool
""" """
posix absolute paths start with os.path.sep, posix absolute paths start with os.path.sep,
@ -440,7 +445,8 @@ class VersionControl(object):
rmtree(location) rmtree(location)
self.obtain(location) self.obtain(location)
def get_src_requirement(self, location, project_name): @classmethod
def get_src_requirement(cls, location, project_name):
""" """
Return a string representing the requirement needed to Return a string representing the requirement needed to
redownload the files currently present in location, something redownload the files currently present in location, something
@ -449,24 +455,31 @@ class VersionControl(object):
""" """
raise NotImplementedError raise NotImplementedError
def get_remote_url(self, location): @classmethod
def get_remote_url(cls, location):
""" """
Return the url used at location Return the url used at location
Raises RemoteNotFoundError if the repository does not have a remote
url configured.
""" """
raise NotImplementedError raise NotImplementedError
def get_revision(self, location): @classmethod
def get_revision(cls, location):
""" """
Return the current commit id of the files at the given location. Return the current commit id of the files at the given location.
""" """
raise NotImplementedError raise NotImplementedError
@classmethod
def run_command( def run_command(
self, cls,
cmd, # type: List[str] cmd, # type: List[str]
show_stdout=True, # type: bool show_stdout=True, # type: bool
cwd=None, # type: Optional[str] cwd=None, # type: Optional[str]
on_returncode='raise', # type: str on_returncode='raise', # type: str
extra_ok_returncodes=None, # type: Optional[Iterable[int]]
command_desc=None, # type: Optional[str] command_desc=None, # type: Optional[str]
extra_environ=None, # type: Optional[Mapping[str, Any]] extra_environ=None, # type: Optional[Mapping[str, Any]]
spinner=None # type: Optional[SpinnerInterface] spinner=None # type: Optional[SpinnerInterface]
@ -477,12 +490,14 @@ class VersionControl(object):
This is simply a wrapper around call_subprocess that adds the VCS This is simply a wrapper around call_subprocess that adds the VCS
command name, and checks that the VCS is available command name, and checks that the VCS is available
""" """
cmd = [self.name] + cmd cmd = [cls.name] + cmd
try: try:
return call_subprocess(cmd, show_stdout, cwd, return call_subprocess(cmd, show_stdout, cwd,
on_returncode, on_returncode=on_returncode,
command_desc, extra_environ, extra_ok_returncodes=extra_ok_returncodes,
unset_environ=self.unset_environ, command_desc=command_desc,
extra_environ=extra_environ,
unset_environ=cls.unset_environ,
spinner=spinner) spinner=spinner)
except OSError as e: except OSError as e:
# errno.ENOENT = no such file or directory # errno.ENOENT = no such file or directory
@ -491,7 +506,7 @@ class VersionControl(object):
raise BadCommand( raise BadCommand(
'Cannot find command %r - do you have ' 'Cannot find command %r - do you have '
'%r installed and in your ' '%r installed and in your '
'PATH?' % (self.name, self.name)) 'PATH?' % (cls.name, cls.name))
else: else:
raise # re-raise exception if a different error occurred raise # re-raise exception if a different error occurred

View file

@ -75,32 +75,35 @@ class Bazaar(VersionControl):
url = 'bzr+' + url url = 'bzr+' + url
return url, rev, user_pass return url, rev, user_pass
def get_remote_url(self, location): @classmethod
urls = self.run_command(['info'], show_stdout=False, cwd=location) def get_remote_url(cls, location):
urls = cls.run_command(['info'], show_stdout=False, cwd=location)
for line in urls.splitlines(): for line in urls.splitlines():
line = line.strip() line = line.strip()
for x in ('checkout of branch: ', for x in ('checkout of branch: ',
'parent branch: '): 'parent branch: '):
if line.startswith(x): if line.startswith(x):
repo = line.split(x)[1] repo = line.split(x)[1]
if self._is_local_repository(repo): if cls._is_local_repository(repo):
return path_to_url(repo) return path_to_url(repo)
return repo return repo
return None return None
def get_revision(self, location): @classmethod
revision = self.run_command( def get_revision(cls, location):
revision = cls.run_command(
['revno'], show_stdout=False, cwd=location, ['revno'], show_stdout=False, cwd=location,
) )
return revision.splitlines()[-1] return revision.splitlines()[-1]
def get_src_requirement(self, location, project_name): @classmethod
repo = self.get_remote_url(location) def get_src_requirement(cls, location, project_name):
repo = cls.get_remote_url(location)
if not repo: if not repo:
return None return None
if not repo.lower().startswith('bzr:'): if not repo.lower().startswith('bzr:'):
repo = 'bzr+' + repo repo = 'bzr+' + repo
current_rev = self.get_revision(location) current_rev = cls.get_revision(location)
return make_vcs_requirement_url(repo, current_rev, project_name) return make_vcs_requirement_url(repo, current_rev, project_name)
def is_commit_id_equal(self, dest, name): def is_commit_id_equal(self, dest, name):

View file

@ -14,7 +14,7 @@ from pip._internal.utils.misc import (
display_path, make_vcs_requirement_url, redact_password_from_url, display_path, make_vcs_requirement_url, redact_password_from_url,
) )
from pip._internal.utils.temp_dir import TempDirectory from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.vcs import VersionControl, vcs from pip._internal.vcs import RemoteNotFoundError, VersionControl, vcs
urlsplit = urllib_parse.urlsplit urlsplit = urllib_parse.urlsplit
urlunsplit = urllib_parse.urlunsplit urlunsplit = urllib_parse.urlunsplit
@ -79,19 +79,25 @@ class Git(VersionControl):
version = '.'.join(version.split('.')[:3]) version = '.'.join(version.split('.')[:3])
return parse_version(version) return parse_version(version)
def get_branch(self, location): def get_current_branch(self, location):
""" """
Return the current branch, or None if HEAD isn't at a branch Return the current branch, or None if HEAD isn't at a branch
(e.g. detached HEAD). (e.g. detached HEAD).
""" """
args = ['rev-parse', '--abbrev-ref', 'HEAD'] # git-symbolic-ref exits with empty stdout if "HEAD" is a detached
output = self.run_command(args, show_stdout=False, cwd=location) # HEAD rather than a symbolic ref. In addition, the -q causes the
branch = output.strip() # command to exit with status code 1 instead of 128 in this case
# and to suppress the message to stderr.
args = ['symbolic-ref', '-q', 'HEAD']
output = self.run_command(
args, extra_ok_returncodes=(1, ), show_stdout=False, cwd=location,
)
ref = output.strip()
if branch == 'HEAD': if ref.startswith('refs/heads/'):
return None return ref[len('refs/heads/'):]
return branch return None
def export(self, location): def export(self, location):
"""Export the Git repository at the url to the destination location""" """Export the Git repository at the url to the destination location"""
@ -210,7 +216,7 @@ class Git(VersionControl):
if not self.is_commit_id_equal(dest, rev_options.rev): if not self.is_commit_id_equal(dest, rev_options.rev):
cmd_args = ['checkout', '-q'] + rev_options.to_args() cmd_args = ['checkout', '-q'] + rev_options.to_args()
self.run_command(cmd_args, cwd=dest) self.run_command(cmd_args, cwd=dest)
elif self.get_branch(dest) != branch_name: elif self.get_current_branch(dest) != branch_name:
# Then a specific branch was requested, and that branch # Then a specific branch was requested, and that branch
# is not yet checked out. # is not yet checked out.
track_branch = 'origin/{}'.format(branch_name) track_branch = 'origin/{}'.format(branch_name)
@ -243,14 +249,26 @@ class Git(VersionControl):
#: update submodules #: update submodules
self.update_submodules(dest) self.update_submodules(dest)
def get_remote_url(self, location): @classmethod
"""Return URL of the first remote encountered.""" def get_remote_url(cls, location):
remotes = self.run_command( """
Return URL of the first remote encountered.
Raises RemoteNotFoundError if the repository does not have a remote
url configured.
"""
# We need to pass 1 for extra_ok_returncodes since the command
# exits with return code 1 if there are no matching lines.
stdout = cls.run_command(
['config', '--get-regexp', r'remote\..*\.url'], ['config', '--get-regexp', r'remote\..*\.url'],
show_stdout=False, cwd=location, extra_ok_returncodes=(1, ), show_stdout=False, cwd=location,
) )
remotes = remotes.splitlines() remotes = stdout.splitlines()
found_remote = remotes[0] try:
found_remote = remotes[0]
except IndexError:
raise RemoteNotFoundError
for remote in remotes: for remote in remotes:
if remote.startswith('remote.origin.url '): if remote.startswith('remote.origin.url '):
found_remote = remote found_remote = remote
@ -258,19 +276,21 @@ class Git(VersionControl):
url = found_remote.split(' ')[1] url = found_remote.split(' ')[1]
return url.strip() return url.strip()
def get_revision(self, location, rev=None): @classmethod
def get_revision(cls, location, rev=None):
if rev is None: if rev is None:
rev = 'HEAD' rev = 'HEAD'
current_rev = self.run_command( current_rev = cls.run_command(
['rev-parse', rev], show_stdout=False, cwd=location, ['rev-parse', rev], show_stdout=False, cwd=location,
) )
return current_rev.strip() return current_rev.strip()
def _get_subdirectory(self, location): @classmethod
def _get_subdirectory(cls, location):
"""Return the relative path of setup.py to the git repo root.""" """Return the relative path of setup.py to the git repo root."""
# find the repo root # find the repo root
git_dir = self.run_command(['rev-parse', '--git-dir'], git_dir = cls.run_command(['rev-parse', '--git-dir'],
show_stdout=False, cwd=location).strip() show_stdout=False, cwd=location).strip()
if not os.path.isabs(git_dir): if not os.path.isabs(git_dir):
git_dir = os.path.join(location, git_dir) git_dir = os.path.join(location, git_dir)
root_dir = os.path.join(git_dir, '..') root_dir = os.path.join(git_dir, '..')
@ -293,12 +313,13 @@ class Git(VersionControl):
return None return None
return os.path.relpath(location, root_dir) return os.path.relpath(location, root_dir)
def get_src_requirement(self, location, project_name): @classmethod
repo = self.get_remote_url(location) def get_src_requirement(cls, location, project_name):
repo = cls.get_remote_url(location)
if not repo.lower().startswith('git:'): if not repo.lower().startswith('git:'):
repo = 'git+' + repo repo = 'git+' + repo
current_rev = self.get_revision(location) current_rev = cls.get_revision(location)
subdir = self._get_subdirectory(location) subdir = cls._get_subdirectory(location)
req = make_vcs_requirement_url(repo, current_rev, project_name, req = make_vcs_requirement_url(repo, current_rev, project_name,
subdir=subdir) subdir=subdir)
@ -334,10 +355,10 @@ class Git(VersionControl):
if super(Git, cls).controls_location(location): if super(Git, cls).controls_location(location):
return True return True
try: try:
r = cls().run_command(['rev-parse'], r = cls.run_command(['rev-parse'],
cwd=location, cwd=location,
show_stdout=False, show_stdout=False,
on_returncode='ignore') on_returncode='ignore')
return not r return not r
except BadCommand: except BadCommand:
logger.debug("could not determine if %s is under git control " logger.debug("could not determine if %s is under git control "

View file

@ -64,31 +64,35 @@ class Mercurial(VersionControl):
cmd_args = ['update', '-q'] + rev_options.to_args() cmd_args = ['update', '-q'] + rev_options.to_args()
self.run_command(cmd_args, cwd=dest) self.run_command(cmd_args, cwd=dest)
def get_remote_url(self, location): @classmethod
url = self.run_command( def get_remote_url(cls, location):
url = cls.run_command(
['showconfig', 'paths.default'], ['showconfig', 'paths.default'],
show_stdout=False, cwd=location).strip() show_stdout=False, cwd=location).strip()
if self._is_local_repository(url): if cls._is_local_repository(url):
url = path_to_url(url) url = path_to_url(url)
return url.strip() return url.strip()
def get_revision(self, location): @classmethod
current_revision = self.run_command( def get_revision(cls, location):
current_revision = cls.run_command(
['parents', '--template={rev}'], ['parents', '--template={rev}'],
show_stdout=False, cwd=location).strip() show_stdout=False, cwd=location).strip()
return current_revision return current_revision
def get_revision_hash(self, location): @classmethod
current_rev_hash = self.run_command( def get_revision_hash(cls, location):
current_rev_hash = cls.run_command(
['parents', '--template={node}'], ['parents', '--template={node}'],
show_stdout=False, cwd=location).strip() show_stdout=False, cwd=location).strip()
return current_rev_hash return current_rev_hash
def get_src_requirement(self, location, project_name): @classmethod
repo = self.get_remote_url(location) def get_src_requirement(cls, location, project_name):
repo = cls.get_remote_url(location)
if not repo.lower().startswith('hg:'): if not repo.lower().startswith('hg:'):
repo = 'hg+' + repo repo = 'hg+' + repo
current_rev_hash = self.get_revision_hash(location) current_rev_hash = cls.get_revision_hash(location)
return make_vcs_requirement_url(repo, current_rev_hash, project_name) return make_vcs_requirement_url(repo, current_rev_hash, project_name)
def is_commit_id_equal(self, dest, name): def is_commit_id_equal(self, dest, name):

View file

@ -4,7 +4,6 @@ import logging
import os import os
import re import re
from pip._internal.models.link import Link
from pip._internal.utils.logging import indent_log from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import ( from pip._internal.utils.misc import (
display_path, make_vcs_requirement_url, rmtree, split_auth_from_netloc, display_path, make_vcs_requirement_url, rmtree, split_auth_from_netloc,
@ -61,21 +60,8 @@ class Subversion(VersionControl):
cmd_args = ['update'] + rev_options.to_args() + [dest] cmd_args = ['update'] + rev_options.to_args() + [dest]
self.run_command(cmd_args) self.run_command(cmd_args)
def get_location(self, dist, dependency_links): @classmethod
for url in dependency_links: def get_revision(cls, location):
egg_fragment = Link(url).egg_fragment
if not egg_fragment:
continue
if '-' in egg_fragment:
# FIXME: will this work when a package has - in the name?
key = '-'.join(egg_fragment.split('-')[:-1]).lower()
else:
key = egg_fragment
if key == dist.key:
return url.split('#', 1)[0]
return None
def get_revision(self, location):
""" """
Return the maximum revision for all files under a given location Return the maximum revision for all files under a given location
""" """
@ -83,16 +69,16 @@ class Subversion(VersionControl):
revision = 0 revision = 0
for base, dirs, files in os.walk(location): for base, dirs, files in os.walk(location):
if self.dirname not in dirs: if cls.dirname not in dirs:
dirs[:] = [] dirs[:] = []
continue # no sense walking uncontrolled subdirs continue # no sense walking uncontrolled subdirs
dirs.remove(self.dirname) dirs.remove(cls.dirname)
entries_fn = os.path.join(base, self.dirname, 'entries') entries_fn = os.path.join(base, cls.dirname, 'entries')
if not os.path.exists(entries_fn): if not os.path.exists(entries_fn):
# FIXME: should we warn? # FIXME: should we warn?
continue continue
dirurl, localrev = self._get_svn_url_rev(base) dirurl, localrev = cls._get_svn_url_rev(base)
if base == location: if base == location:
base = dirurl + '/' # save the root url base = dirurl + '/' # save the root url
@ -131,7 +117,8 @@ class Subversion(VersionControl):
return extra_args return extra_args
def get_remote_url(self, location): @classmethod
def get_remote_url(cls, location):
# In cases where the source is in a subdirectory, not alongside # In cases where the source is in a subdirectory, not alongside
# setup.py we have to look up in the location until we find a real # setup.py we have to look up in the location until we find a real
# setup.py # setup.py
@ -149,12 +136,13 @@ class Subversion(VersionControl):
) )
return None return None
return self._get_svn_url_rev(location)[0] return cls._get_svn_url_rev(location)[0]
def _get_svn_url_rev(self, location): @classmethod
def _get_svn_url_rev(cls, location):
from pip._internal.exceptions import InstallationError from pip._internal.exceptions import InstallationError
entries_path = os.path.join(location, self.dirname, 'entries') entries_path = os.path.join(location, cls.dirname, 'entries')
if os.path.exists(entries_path): if os.path.exists(entries_path):
with open(entries_path) as f: with open(entries_path) as f:
data = f.read() data = f.read()
@ -177,7 +165,7 @@ class Subversion(VersionControl):
else: else:
try: try:
# subversion >= 1.7 # subversion >= 1.7
xml = self.run_command( xml = cls.run_command(
['info', '--xml', location], ['info', '--xml', location],
show_stdout=False, show_stdout=False,
) )
@ -195,12 +183,13 @@ class Subversion(VersionControl):
return url, rev return url, rev
def get_src_requirement(self, location, project_name): @classmethod
repo = self.get_remote_url(location) def get_src_requirement(cls, location, project_name):
repo = cls.get_remote_url(location)
if repo is None: if repo is None:
return None return None
repo = 'svn+' + repo repo = 'svn+' + repo
rev = self.get_revision(location) rev = cls.get_revision(location)
return make_vcs_requirement_url(repo, rev, project_name) return make_vcs_requirement_url(repo, rev, project_name)
def is_commit_id_equal(self, dest, name): def is_commit_id_equal(self, dest, name):

View file

@ -55,7 +55,7 @@ if MYPY_CHECK_RUNNING:
from pip._internal.cache import WheelCache # noqa: F401 from pip._internal.cache import WheelCache # noqa: F401
from pip._internal.pep425tags import Pep425Tag # noqa: F401 from pip._internal.pep425tags import Pep425Tag # noqa: F401
InstalledCSVRow = Tuple[str, Union[str, Text], str] InstalledCSVRow = Tuple[str, ...]
VERSION_COMPATIBLE = (1, 0) VERSION_COMPATIBLE = (1, 0)
@ -64,6 +64,10 @@ VERSION_COMPATIBLE = (1, 0)
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
def normpath(src, p):
return os.path.relpath(src, p).replace(os.path.sep, '/')
def rehash(path, blocksize=1 << 20): def rehash(path, blocksize=1 << 20):
# type: (str, int) -> Tuple[str, str] # type: (str, int) -> Tuple[str, str]
"""Return (hash, length) for path using hashlib.sha256()""" """Return (hash, length) for path using hashlib.sha256()"""
@ -255,6 +259,35 @@ def sorted_outrows(outrows):
return sorted(outrows, key=lambda row: tuple(str(x) for x in row)) return sorted(outrows, key=lambda row: tuple(str(x) for x in row))
def get_csv_rows_for_installed(
old_csv_rows, # type: Iterable[List[str]]
installed, # type: Dict[str, str]
changed, # type: set
generated, # type: List[str]
lib_dir, # type: str
):
# type: (...) -> List[InstalledCSVRow]
installed_rows = [] # type: List[InstalledCSVRow]
for row in old_csv_rows:
if len(row) > 3:
logger.warning(
'RECORD line has more than three elements: {}'.format(row)
)
fpath = row[0]
fpath = installed.pop(fpath, fpath)
if fpath in changed:
digest, length = rehash(fpath)
row[1] = digest
row[2] = length
installed_rows.append(tuple(row))
for f in generated:
digest, length = rehash(f)
installed_rows.append((normpath(f, lib_dir), digest, str(length)))
for f in installed:
installed_rows.append((installed[f], '', ''))
return installed_rows
def move_wheel_files( def move_wheel_files(
name, # type: str name, # type: str
req, # type: Requirement req, # type: Requirement
@ -305,9 +338,6 @@ def move_wheel_files(
compileall.compile_dir(source, force=True, quiet=True) compileall.compile_dir(source, force=True, quiet=True)
logger.debug(stdout.getvalue()) logger.debug(stdout.getvalue())
def normpath(src, p):
return os.path.relpath(src, p).replace(os.path.sep, '/')
def record_installed(srcfile, destfile, modified=False): def record_installed(srcfile, destfile, modified=False):
"""Map archive RECORD paths to installation RECORD paths.""" """Map archive RECORD paths to installation RECORD paths."""
oldpath = normpath(srcfile, wheeldir) oldpath = normpath(srcfile, wheeldir)
@ -559,28 +589,16 @@ if __name__ == '__main__':
shutil.move(temp_installer, installer) shutil.move(temp_installer, installer)
generated.append(installer) generated.append(installer)
def get_csv_rows_for_installed(old_csv_rows):
# type: (Iterable[List[str]]) -> List[InstalledCSVRow]
installed_rows = [] # type: List[InstalledCSVRow]
for fpath, digest, length in old_csv_rows:
fpath = installed.pop(fpath, fpath)
if fpath in changed:
digest, length = rehash(fpath)
installed_rows.append((fpath, digest, str(length)))
for f in generated:
digest, length = rehash(f)
installed_rows.append((normpath(f, lib_dir), digest, str(length)))
for f in installed:
installed_rows.append((installed[f], '', ''))
return installed_rows
# Record details of all files installed # Record details of all files installed
record = os.path.join(info_dir[0], 'RECORD') record = os.path.join(info_dir[0], 'RECORD')
temp_record = os.path.join(info_dir[0], 'RECORD.pip') temp_record = os.path.join(info_dir[0], 'RECORD.pip')
with open_for_csv(record, 'r') as record_in: with open_for_csv(record, 'r') as record_in:
with open_for_csv(temp_record, 'w+') as record_out: with open_for_csv(temp_record, 'w+') as record_out:
reader = csv.reader(record_in) reader = csv.reader(record_in)
outrows = get_csv_rows_for_installed(reader) outrows = get_csv_rows_for_installed(
reader, installed=installed, changed=changed,
generated=generated, lib_dir=lib_dir,
)
writer = csv.writer(record_out) writer = csv.writer(record_out)
# Sort to simplify testing. # Sort to simplify testing.
for row in sorted_outrows(outrows): for row in sorted_outrows(outrows):
@ -840,12 +858,6 @@ class WheelBuilder(object):
newly built wheel, in preparation for installation. newly built wheel, in preparation for installation.
:return: True if all the wheels built correctly. :return: True if all the wheels built correctly.
""" """
# TODO: This check fails if --no-cache-dir is set. And yet we
# might be able to build into the ephemeral cache, surely?
building_is_possible = self._wheel_dir or (
autobuilding and self.wheel_cache.cache_dir
)
assert building_is_possible
buildset = [] buildset = []
format_control = self.finder.format_control format_control = self.finder.format_control
@ -884,6 +896,13 @@ class WheelBuilder(object):
if not buildset: if not buildset:
return [] return []
# Is any wheel build not using the ephemeral cache?
if any(not ephem_cache for _, ephem_cache in buildset):
have_directory_for_build = self._wheel_dir or (
autobuilding and self.wheel_cache.cache_dir
)
assert have_directory_for_build
# TODO by @pradyunsg # TODO by @pradyunsg
# Should break up this method into 2 separate methods. # Should break up this method into 2 separate methods.

View file

@ -20,6 +20,8 @@ Vendoring Policy
``pip/_vendor/README.rst`` and their corresponding patches **MUST** be ``pip/_vendor/README.rst`` and their corresponding patches **MUST** be
included ``tasks/vendoring/patches``. included ``tasks/vendoring/patches``.
* Vendored libraries should have corresponding ``vendored()`` entries in
``pip/_vendor/__init__.py``.
Rationale Rationale
--------- ---------

View file

@ -1,3 +1,3 @@
from .core import where, old_where from .core import where
__version__ = "2018.08.24" __version__ = "2018.11.29"

View file

@ -326,36 +326,6 @@ OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH
QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS
-----END CERTIFICATE----- -----END CERTIFICATE-----
# Issuer: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association
# Subject: CN=Visa eCommerce Root O=VISA OU=Visa International Service Association
# Label: "Visa eCommerce Root"
# Serial: 25952180776285836048024890241505565794
# MD5 Fingerprint: fc:11:b8:d8:08:93:30:00:6d:23:f9:7e:eb:52:1e:02
# SHA1 Fingerprint: 70:17:9b:86:8c:00:a4:fa:60:91:52:22:3f:9f:3e:32:bd:e0:05:62
# SHA256 Fingerprint: 69:fa:c9:bd:55:fb:0a:c7:8d:53:bb:ee:5c:f1:d5:97:98:9f:d0:aa:ab:20:a2:51:51:bd:f1:73:3e:e7:d1:22
-----BEGIN CERTIFICATE-----
MIIDojCCAoqgAwIBAgIQE4Y1TR0/BvLB+WUF1ZAcYjANBgkqhkiG9w0BAQUFADBr
MQswCQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRl
cm5hdGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNv
bW1lcmNlIFJvb3QwHhcNMDIwNjI2MDIxODM2WhcNMjIwNjI0MDAxNjEyWjBrMQsw
CQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRlcm5h
dGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNvbW1l
cmNlIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvV95WHm6h
2mCxlCfLF9sHP4CFT8icttD0b0/Pmdjh28JIXDqsOTPHH2qLJj0rNfVIsZHBAk4E
lpF7sDPwsRROEW+1QK8bRaVK7362rPKgH1g/EkZgPI2h4H3PVz4zHvtH8aoVlwdV
ZqW1LS7YgFmypw23RuwhY/81q6UCzyr0TP579ZRdhE2o8mCP2w4lPJ9zcc+U30rq
299yOIzzlr3xF7zSujtFWsan9sYXiwGd/BmoKoMWuDpI/k4+oKsGGelT84ATB+0t
vz8KPFUgOSwsAGl0lUq8ILKpeeUYiZGo3BxN77t+Nwtd/jmliFKMAGzsGHxBvfaL
dXe6YJ2E5/4tAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD
AgEGMB0GA1UdDgQWBBQVOIMPPyw/cDMezUb+B4wg4NfDtzANBgkqhkiG9w0BAQUF
AAOCAQEAX/FBfXxcCLkr4NWSR/pnXKUTwwMhmytMiUbPWU3J/qVAtmPN3XEolWcR
zCSs00Rsca4BIGsDoo8Ytyk6feUWYFN4PMCvFYP3j1IzJL1kk5fui/fbGKhtcbP3
LBfQdCVp9/5rPJS+TUtBjE7ic9DjkCJzQ83z7+pzzkWKsKZJ/0x9nXGIxHYdkFsd
7v3M9+79YKWxehZx0RbQfBI8bGmX265fOZpwLwU8GUYEmSA20GBuYQa7FkKMcPcw
++DbZqMAAb3mLNqRX6BGi01qnD093QVG/na/oAo85ADmJ7f/hC3euiInlhBx6yLt
398znM/jra6O1I7mT1GvFpLgXPYHDw==
-----END CERTIFICATE-----
# Issuer: CN=AAA Certificate Services O=Comodo CA Limited # Issuer: CN=AAA Certificate Services O=Comodo CA Limited
# Subject: CN=AAA Certificate Services O=Comodo CA Limited # Subject: CN=AAA Certificate Services O=Comodo CA Limited
# Label: "Comodo AAA Services root" # Label: "Comodo AAA Services root"
@ -4298,3 +4268,245 @@ rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV
57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg 57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg
Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9 Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9
-----END CERTIFICATE----- -----END CERTIFICATE-----
# Issuer: CN=GTS Root R1 O=Google Trust Services LLC
# Subject: CN=GTS Root R1 O=Google Trust Services LLC
# Label: "GTS Root R1"
# Serial: 146587175971765017618439757810265552097
# MD5 Fingerprint: 82:1a:ef:d4:d2:4a:f2:9f:e2:3d:97:06:14:70:72:85
# SHA1 Fingerprint: e1:c9:50:e6:ef:22:f8:4c:56:45:72:8b:92:20:60:d7:d5:a7:a3:e8
# SHA256 Fingerprint: 2a:57:54:71:e3:13:40:bc:21:58:1c:bd:2c:f1:3e:15:84:63:20:3e:ce:94:bc:f9:d3:cc:19:6b:f0:9a:54:72
-----BEGIN CERTIFICATE-----
MIIFWjCCA0KgAwIBAgIQbkepxUtHDA3sM9CJuRz04TANBgkqhkiG9w0BAQwFADBH
MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM
QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy
MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl
cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEB
AQUAA4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaM
f/vo27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vX
mX7wCl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7
zUjwTcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0P
fyblqAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtc
vfaHszVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4
Zor8Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUsp
zBmkMiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOO
Rc92wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYW
k70paDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+
DVrNVjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgF
lQIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
HQ4EFgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBADiW
Cu49tJYeX++dnAsznyvgyv3SjgofQXSlfKqE1OXyHuY3UjKcC9FhHb8owbZEKTV1
d5iyfNm9dKyKaOOpMQkpAWBz40d8U6iQSifvS9efk+eCNs6aaAyC58/UEBZvXw6Z
XPYfcX3v73svfuo21pdwCxXu11xWajOl40k4DLh9+42FpLFZXvRq4d2h9mREruZR
gyFmxhE+885H7pwoHyXa/6xmld01D1zvICxi/ZG6qcz8WpyTgYMpl0p8WnK0OdC3
d8t5/Wk6kjftbjhlRn7pYL15iJdfOBL07q9bgsiG1eGZbYwE8na6SfZu6W0eX6Dv
J4J2QPim01hcDyxC2kLGe4g0x8HYRZvBPsVhHdljUEn2NIVq4BjFbkerQUIpm/Zg
DdIx02OYI5NaAIFItO/Nis3Jz5nu2Z6qNuFoS3FJFDYoOj0dzpqPJeaAcWErtXvM
+SUWgeExX6GjfhaknBZqlxi9dnKlC54dNuYvoS++cJEPqOba+MSSQGwlfnuzCdyy
F62ARPBopY+Udf90WuioAnwMCeKpSwughQtiue+hMZL77/ZRBIls6Kl0obsXs7X9
SQ98POyDGCBDTtWTurQ0sR8WNh8M5mQ5Fkzc4P4dyKliPUDqysU0ArSuiYgzNdws
E3PYJ/HQcu51OyLemGhmW/HGY0dVHLqlCFF1pkgl
-----END CERTIFICATE-----
# Issuer: CN=GTS Root R2 O=Google Trust Services LLC
# Subject: CN=GTS Root R2 O=Google Trust Services LLC
# Label: "GTS Root R2"
# Serial: 146587176055767053814479386953112547951
# MD5 Fingerprint: 44:ed:9a:0e:a4:09:3b:00:f2:ae:4c:a3:c6:61:b0:8b
# SHA1 Fingerprint: d2:73:96:2a:2a:5e:39:9f:73:3f:e1:c7:1e:64:3f:03:38:34:fc:4d
# SHA256 Fingerprint: c4:5d:7b:b0:8e:6d:67:e6:2e:42:35:11:0b:56:4e:5f:78:fd:92:ef:05:8c:84:0a:ea:4e:64:55:d7:58:5c:60
-----BEGIN CERTIFICATE-----
MIIFWjCCA0KgAwIBAgIQbkepxlqz5yDFMJo/aFLybzANBgkqhkiG9w0BAQwFADBH
MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM
QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy
MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl
cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEB
AQUAA4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3Lv
CvptnfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3Kg
GjSY6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9Bu
XvAuMC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOd
re7kRXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXu
PuWgf9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1
mKPV+3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K
8YzodDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqj
x5RWIr9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsR
nTKaG73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0
kzCqgc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9Ok
twIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
HQ4EFgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBALZp
8KZ3/p7uC4Gt4cCpx/k1HUCCq+YEtN/L9x0Pg/B+E02NjO7jMyLDOfxA325BS0JT
vhaI8dI4XsRomRyYUpOM52jtG2pzegVATX9lO9ZY8c6DR2Dj/5epnGB3GFW1fgiT
z9D2PGcDFWEJ+YF59exTpJ/JjwGLc8R3dtyDovUMSRqodt6Sm2T4syzFJ9MHwAiA
pJiS4wGWAqoC7o87xdFtCjMwc3i5T1QWvwsHoaRc5svJXISPD+AVdyx+Jn7axEvb
pxZ3B7DNdehyQtaVhJ2Gg/LkkM0JR9SLA3DaWsYDQvTtN6LwG1BUSw7YhN4ZKJmB
R64JGz9I0cNv4rBgF/XuIwKl2gBbbZCr7qLpGzvpx0QnRY5rn/WkhLx3+WuXrD5R
RaIRpsyF7gpo8j5QOHokYh4XIDdtak23CZvJ/KRY9bb7nE4Yu5UC56GtmwfuNmsk
0jmGwZODUNKBRqhfYlcsu2xkiAhu7xNUX90txGdj08+JN7+dIPT7eoOboB6BAFDC
5AwiWVIQ7UNWhwD4FFKnHYuTjKJNRn8nxnGbJN7k2oaLDX5rIMHAnuFl2GqjpuiF
izoHCBy69Y9Vmhh1fuXsgWbRIXOhNUQLgD1bnF5vKheW0YMjiGZt5obicDIvUiLn
yOd/xCxgXS/Dr55FBcOEArf9LAhST4Ldo/DUhgkC
-----END CERTIFICATE-----
# Issuer: CN=GTS Root R3 O=Google Trust Services LLC
# Subject: CN=GTS Root R3 O=Google Trust Services LLC
# Label: "GTS Root R3"
# Serial: 146587176140553309517047991083707763997
# MD5 Fingerprint: 1a:79:5b:6b:04:52:9c:5d:c7:74:33:1b:25:9a:f9:25
# SHA1 Fingerprint: 30:d4:24:6f:07:ff:db:91:89:8a:0b:e9:49:66:11:eb:8c:5e:46:e5
# SHA256 Fingerprint: 15:d5:b8:77:46:19:ea:7d:54:ce:1c:a6:d0:b0:c4:03:e0:37:a9:17:f1:31:e8:a0:4e:1e:6b:7a:71:ba:bc:e5
-----BEGIN CERTIFICATE-----
MIICDDCCAZGgAwIBAgIQbkepx2ypcyRAiQ8DVd2NHTAKBggqhkjOPQQDAzBHMQsw
CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU
MBIGA1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw
MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp
Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQA
IgNiAAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout
736GjOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2A
DDL24CejQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud
DgQWBBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEAgFuk
fCPAlaUs3L6JbyO5o91lAFJekazInXJ0glMLfalAvWhgxeG4VDvBNhcl2MG9AjEA
njWSdIUlUfUk7GRSJFClH9voy8l27OyCbvWFGFPouOOaKaqW04MjyaR7YbPMAuhd
-----END CERTIFICATE-----
# Issuer: CN=GTS Root R4 O=Google Trust Services LLC
# Subject: CN=GTS Root R4 O=Google Trust Services LLC
# Label: "GTS Root R4"
# Serial: 146587176229350439916519468929765261721
# MD5 Fingerprint: 5d:b6:6a:c4:60:17:24:6a:1a:99:a8:4b:ee:5e:b4:26
# SHA1 Fingerprint: 2a:1d:60:27:d9:4a:b1:0a:1c:4d:91:5c:cd:33:a0:cb:3e:2d:54:cb
# SHA256 Fingerprint: 71:cc:a5:39:1f:9e:79:4b:04:80:25:30:b3:63:e1:21:da:8a:30:43:bb:26:66:2f:ea:4d:ca:7f:c9:51:a4:bd
-----BEGIN CERTIFICATE-----
MIICCjCCAZGgAwIBAgIQbkepyIuUtui7OyrYorLBmTAKBggqhkjOPQQDAzBHMQsw
CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU
MBIGA1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw
MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp
Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQA
IgNiAATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzu
hXyiQHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/l
xKvRHYqjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud
DgQWBBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNnADBkAjBqUFJ0
CMRw3J5QdCHojXohw0+WbhXRIjVhLfoIN+4Zba3bssx9BzT1YBkstTTZbyACMANx
sbqjYAuG7ZoIapVon+Kz4ZNkfF6Tpt95LY2F45TPI11xzPKwTdb+mciUqXWi4w==
-----END CERTIFICATE-----
# Issuer: CN=UCA Global G2 Root O=UniTrust
# Subject: CN=UCA Global G2 Root O=UniTrust
# Label: "UCA Global G2 Root"
# Serial: 124779693093741543919145257850076631279
# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8
# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a
# SHA256 Fingerprint: 9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c
-----BEGIN CERTIFICATE-----
MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9
MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH
bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x
CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds
b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr
b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9
kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm
VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R
VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc
C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj
tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY
D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv
j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl
NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6
iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP
O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/
BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV
ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj
L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5
1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl
1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU
b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV
PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj
y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb
EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg
DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI
+Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy
YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX
UB+K+wb1whnw0A==
-----END CERTIFICATE-----
# Issuer: CN=UCA Extended Validation Root O=UniTrust
# Subject: CN=UCA Extended Validation Root O=UniTrust
# Label: "UCA Extended Validation Root"
# Serial: 106100277556486529736699587978573607008
# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2
# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a
# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24
-----BEGIN CERTIFICATE-----
MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH
MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF
eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx
MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV
BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB
AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog
D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS
sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop
O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk
sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi
c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj
VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz
KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/
TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G
sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs
1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD
fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T
AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN
l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR
ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ
VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5
c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp
4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s
t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj
2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO
vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C
xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx
cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM
fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax
-----END CERTIFICATE-----
# Issuer: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036
# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036
# Label: "Certigna Root CA"
# Serial: 269714418870597844693661054334862075617
# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77
# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43
# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68
-----BEGIN CERTIFICATE-----
MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw
WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw
MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x
MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD
VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX
BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw
ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO
ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M
CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu
I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm
TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh
C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf
ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz
IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT
Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k
JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5
hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB
GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE
FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of
1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov
L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo
dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr
aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq
hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L
6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG
HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6
0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB
lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi
o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1
gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v
faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63
Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh
jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw
3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0=
-----END CERTIFICATE-----

View file

@ -8,14 +8,6 @@ certifi.py
This module returns the installation location of cacert.pem. This module returns the installation location of cacert.pem.
""" """
import os import os
import warnings
class DeprecatedBundleWarning(DeprecationWarning):
"""
The weak security bundle is being deprecated. Please bother your service
provider to get them to stop using cross-signed roots.
"""
def where(): def where():
@ -24,14 +16,5 @@ def where():
return os.path.join(f, 'cacert.pem') return os.path.join(f, 'cacert.pem')
def old_where():
warnings.warn(
"The weak security bundle has been removed. certifi.old_where() is now an alias "
"of certifi.where(). Please update your code to use certifi.where() instead. "
"certifi.old_where() will be removed in 2018.",
DeprecatedBundleWarning
)
return where()
if __name__ == '__main__': if __name__ == '__main__':
print(where()) print(where())

View file

@ -25,4 +25,3 @@ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View file

@ -3,5 +3,4 @@ from .initialise import init, deinit, reinit, colorama_text
from .ansi import Fore, Back, Style, Cursor from .ansi import Fore, Back, Style, Cursor
from .ansitowin32 import AnsiToWin32 from .ansitowin32 import AnsiToWin32
__version__ = '0.3.9' __version__ = '0.4.1'

View file

@ -13,14 +13,6 @@ if windll is not None:
winterm = WinTerm() winterm = WinTerm()
def is_stream_closed(stream):
return not hasattr(stream, 'closed') or stream.closed
def is_a_tty(stream):
return hasattr(stream, 'isatty') and stream.isatty()
class StreamWrapper(object): class StreamWrapper(object):
''' '''
Wraps a stream (such as stdout), acting as a transparent proxy for all Wraps a stream (such as stdout), acting as a transparent proxy for all
@ -36,9 +28,38 @@ class StreamWrapper(object):
def __getattr__(self, name): def __getattr__(self, name):
return getattr(self.__wrapped, name) return getattr(self.__wrapped, name)
def __enter__(self, *args, **kwargs):
# special method lookup bypasses __getattr__/__getattribute__, see
# https://stackoverflow.com/questions/12632894/why-doesnt-getattr-work-with-exit
# thus, contextlib magic methods are not proxied via __getattr__
return self.__wrapped.__enter__(*args, **kwargs)
def __exit__(self, *args, **kwargs):
return self.__wrapped.__exit__(*args, **kwargs)
def write(self, text): def write(self, text):
self.__convertor.write(text) self.__convertor.write(text)
def isatty(self):
stream = self.__wrapped
if 'PYCHARM_HOSTED' in os.environ:
if stream is not None and (stream is sys.__stdout__ or stream is sys.__stderr__):
return True
try:
stream_isatty = stream.isatty
except AttributeError:
return False
else:
return stream_isatty()
@property
def closed(self):
stream = self.__wrapped
try:
return stream.closed
except AttributeError:
return True
class AnsiToWin32(object): class AnsiToWin32(object):
''' '''
@ -68,12 +89,12 @@ class AnsiToWin32(object):
# should we strip ANSI sequences from our output? # should we strip ANSI sequences from our output?
if strip is None: if strip is None:
strip = conversion_supported or (not is_stream_closed(wrapped) and not is_a_tty(wrapped)) strip = conversion_supported or (not self.stream.closed and not self.stream.isatty())
self.strip = strip self.strip = strip
# should we should convert ANSI sequences into win32 calls? # should we should convert ANSI sequences into win32 calls?
if convert is None: if convert is None:
convert = conversion_supported and not is_stream_closed(wrapped) and is_a_tty(wrapped) convert = conversion_supported and not self.stream.closed and self.stream.isatty()
self.convert = convert self.convert = convert
# dict of ansi codes to win32 functions and parameters # dict of ansi codes to win32 functions and parameters
@ -149,7 +170,7 @@ class AnsiToWin32(object):
def reset_all(self): def reset_all(self):
if self.convert: if self.convert:
self.call_win32('m', (0,)) self.call_win32('m', (0,))
elif not self.strip and not is_stream_closed(self.wrapped): elif not self.strip and not self.stream.closed:
self.wrapped.write(Style.RESET_ALL) self.wrapped.write(Style.RESET_ALL)

View file

@ -78,5 +78,3 @@ def wrap_stream(stream, convert, strip, autoreset, wrap):
if wrapper.should_wrap(): if wrapper.should_wrap():
stream = wrapper.stream stream = wrapper.stream
return stream return stream

View file

@ -89,11 +89,6 @@ else:
] ]
_SetConsoleTitleW.restype = wintypes.BOOL _SetConsoleTitleW.restype = wintypes.BOOL
handles = {
STDOUT: _GetStdHandle(STDOUT),
STDERR: _GetStdHandle(STDERR),
}
def _winapi_test(handle): def _winapi_test(handle):
csbi = CONSOLE_SCREEN_BUFFER_INFO() csbi = CONSOLE_SCREEN_BUFFER_INFO()
success = _GetConsoleScreenBufferInfo( success = _GetConsoleScreenBufferInfo(
@ -101,17 +96,18 @@ else:
return bool(success) return bool(success)
def winapi_test(): def winapi_test():
return any(_winapi_test(h) for h in handles.values()) return any(_winapi_test(h) for h in
(_GetStdHandle(STDOUT), _GetStdHandle(STDERR)))
def GetConsoleScreenBufferInfo(stream_id=STDOUT): def GetConsoleScreenBufferInfo(stream_id=STDOUT):
handle = handles[stream_id] handle = _GetStdHandle(stream_id)
csbi = CONSOLE_SCREEN_BUFFER_INFO() csbi = CONSOLE_SCREEN_BUFFER_INFO()
success = _GetConsoleScreenBufferInfo( success = _GetConsoleScreenBufferInfo(
handle, byref(csbi)) handle, byref(csbi))
return csbi return csbi
def SetConsoleTextAttribute(stream_id, attrs): def SetConsoleTextAttribute(stream_id, attrs):
handle = handles[stream_id] handle = _GetStdHandle(stream_id)
return _SetConsoleTextAttribute(handle, attrs) return _SetConsoleTextAttribute(handle, attrs)
def SetConsoleCursorPosition(stream_id, position, adjust=True): def SetConsoleCursorPosition(stream_id, position, adjust=True):
@ -129,11 +125,11 @@ else:
adjusted_position.Y += sr.Top adjusted_position.Y += sr.Top
adjusted_position.X += sr.Left adjusted_position.X += sr.Left
# Resume normal processing # Resume normal processing
handle = handles[stream_id] handle = _GetStdHandle(stream_id)
return _SetConsoleCursorPosition(handle, adjusted_position) return _SetConsoleCursorPosition(handle, adjusted_position)
def FillConsoleOutputCharacter(stream_id, char, length, start): def FillConsoleOutputCharacter(stream_id, char, length, start):
handle = handles[stream_id] handle = _GetStdHandle(stream_id)
char = c_char(char.encode()) char = c_char(char.encode())
length = wintypes.DWORD(length) length = wintypes.DWORD(length)
num_written = wintypes.DWORD(0) num_written = wintypes.DWORD(0)
@ -144,7 +140,7 @@ else:
def FillConsoleOutputAttribute(stream_id, attr, length, start): def FillConsoleOutputAttribute(stream_id, attr, length, start):
''' FillConsoleOutputAttribute( hConsole, csbi.wAttributes, dwConSize, coordScreen, &cCharsWritten )''' ''' FillConsoleOutputAttribute( hConsole, csbi.wAttributes, dwConSize, coordScreen, &cCharsWritten )'''
handle = handles[stream_id] handle = _GetStdHandle(stream_id)
attribute = wintypes.WORD(attr) attribute = wintypes.WORD(attr)
length = wintypes.DWORD(length) length = wintypes.DWORD(length)
num_written = wintypes.DWORD(0) num_written = wintypes.DWORD(0)

View file

@ -44,6 +44,7 @@ class WinTerm(object):
def reset_all(self, on_stderr=None): def reset_all(self, on_stderr=None):
self.set_attrs(self._default) self.set_attrs(self._default)
self.set_console(attrs=self._default) self.set_console(attrs=self._default)
self._light = 0
def fore(self, fore=None, light=False, on_stderr=False): def fore(self, fore=None, light=False, on_stderr=False):
if fore is None: if fore is None:
@ -122,12 +123,15 @@ class WinTerm(object):
if mode == 0: if mode == 0:
from_coord = csbi.dwCursorPosition from_coord = csbi.dwCursorPosition
cells_to_erase = cells_in_screen - cells_before_cursor cells_to_erase = cells_in_screen - cells_before_cursor
if mode == 1: elif mode == 1:
from_coord = win32.COORD(0, 0) from_coord = win32.COORD(0, 0)
cells_to_erase = cells_before_cursor cells_to_erase = cells_before_cursor
elif mode == 2: elif mode == 2:
from_coord = win32.COORD(0, 0) from_coord = win32.COORD(0, 0)
cells_to_erase = cells_in_screen cells_to_erase = cells_in_screen
else:
# invalid mode
return
# fill the entire screen with blanks # fill the entire screen with blanks
win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord) win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord)
# now set the buffer's attributes accordingly # now set the buffer's attributes accordingly
@ -147,12 +151,15 @@ class WinTerm(object):
if mode == 0: if mode == 0:
from_coord = csbi.dwCursorPosition from_coord = csbi.dwCursorPosition
cells_to_erase = csbi.dwSize.X - csbi.dwCursorPosition.X cells_to_erase = csbi.dwSize.X - csbi.dwCursorPosition.X
if mode == 1: elif mode == 1:
from_coord = win32.COORD(0, csbi.dwCursorPosition.Y) from_coord = win32.COORD(0, csbi.dwCursorPosition.Y)
cells_to_erase = csbi.dwCursorPosition.X cells_to_erase = csbi.dwCursorPosition.X
elif mode == 2: elif mode == 2:
from_coord = win32.COORD(0, csbi.dwCursorPosition.Y) from_coord = win32.COORD(0, csbi.dwCursorPosition.Y)
cells_to_erase = csbi.dwSize.X cells_to_erase = csbi.dwSize.X
else:
# invalid mode
return
# fill the entire screen with blanks # fill the entire screen with blanks
win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord) win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord)
# now set the buffer's attributes accordingly # now set the buffer's attributes accordingly

View file

@ -6,7 +6,7 @@
# #
import logging import logging
__version__ = '0.2.7' __version__ = '0.2.8'
class DistlibException(Exception): class DistlibException(Exception):
pass pass

View file

@ -20,7 +20,8 @@ import zipimport
from . import DistlibException, resources from . import DistlibException, resources
from .compat import StringIO from .compat import StringIO
from .version import get_scheme, UnsupportedVersionError from .version import get_scheme, UnsupportedVersionError
from .metadata import Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME,
LEGACY_METADATA_FILENAME)
from .util import (parse_requirement, cached_property, parse_name_and_version, from .util import (parse_requirement, cached_property, parse_name_and_version,
read_exports, write_exports, CSVReader, CSVWriter) read_exports, write_exports, CSVReader, CSVWriter)
@ -132,7 +133,9 @@ class DistributionPath(object):
if not r or r.path in seen: if not r or r.path in seen:
continue continue
if self._include_dist and entry.endswith(DISTINFO_EXT): if self._include_dist and entry.endswith(DISTINFO_EXT):
possible_filenames = [METADATA_FILENAME, WHEEL_METADATA_FILENAME] possible_filenames = [METADATA_FILENAME,
WHEEL_METADATA_FILENAME,
LEGACY_METADATA_FILENAME]
for metadata_filename in possible_filenames: for metadata_filename in possible_filenames:
metadata_path = posixpath.join(entry, metadata_filename) metadata_path = posixpath.join(entry, metadata_filename)
pydist = finder.find(metadata_path) pydist = finder.find(metadata_path)

View file

@ -255,7 +255,9 @@ class Locator(object):
if path.endswith('.whl'): if path.endswith('.whl'):
try: try:
wheel = Wheel(path) wheel = Wheel(path)
if is_compatible(wheel, self.wheel_tags): if not is_compatible(wheel, self.wheel_tags):
logger.debug('Wheel not compatible: %s', path)
else:
if project_name is None: if project_name is None:
include = True include = True
else: else:
@ -613,6 +615,7 @@ class SimpleScrapingLocator(Locator):
# as it is for coordinating our internal threads - the ones created # as it is for coordinating our internal threads - the ones created
# in _prepare_threads. # in _prepare_threads.
self._gplock = threading.RLock() self._gplock = threading.RLock()
self.platform_check = False # See issue #112
def _prepare_threads(self): def _prepare_threads(self):
""" """
@ -658,8 +661,8 @@ class SimpleScrapingLocator(Locator):
del self.result del self.result
return result return result
platform_dependent = re.compile(r'\b(linux-(i\d86|x86_64|arm\w+)|' platform_dependent = re.compile(r'\b(linux_(i\d86|x86_64|arm\w+)|'
r'win(32|-amd64)|macosx-?\d+)\b', re.I) r'win(32|_amd64)|macosx_?\d+)\b', re.I)
def _is_platform_dependent(self, url): def _is_platform_dependent(self, url):
""" """
@ -677,7 +680,7 @@ class SimpleScrapingLocator(Locator):
Note that the return value isn't actually used other than as a boolean Note that the return value isn't actually used other than as a boolean
value. value.
""" """
if self._is_platform_dependent(url): if self.platform_check and self._is_platform_dependent(url):
info = None info = None
else: else:
info = self.convert_url_to_download_info(url, self.project_name) info = self.convert_url_to_download_info(url, self.project_name)

View file

@ -91,7 +91,9 @@ _426_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
_426_MARKERS = ('Private-Version', 'Provides-Extra', 'Obsoleted-By', _426_MARKERS = ('Private-Version', 'Provides-Extra', 'Obsoleted-By',
'Setup-Requires-Dist', 'Extension') 'Setup-Requires-Dist', 'Extension')
_566_FIELDS = _426_FIELDS + ('Description-Content-Type',) # See issue #106: Sometimes 'Requires' occurs wrongly in the metadata. Include
# it in the tuple literal below to allow it (for now)
_566_FIELDS = _426_FIELDS + ('Description-Content-Type', 'Requires')
_566_MARKERS = ('Description-Content-Type',) _566_MARKERS = ('Description-Content-Type',)
@ -377,8 +379,8 @@ class LegacyMetadata(object):
value = msg[field] value = msg[field]
if value is not None and value != 'UNKNOWN': if value is not None and value != 'UNKNOWN':
self.set(field, value) self.set(field, value)
logger.debug('Attempting to set metadata for %s', self) # logger.debug('Attempting to set metadata for %s', self)
self.set_metadata_version() # self.set_metadata_version()
def write(self, filepath, skip_unknown=False): def write(self, filepath, skip_unknown=False):
"""Write the metadata fields to filepath.""" """Write the metadata fields to filepath."""
@ -648,6 +650,7 @@ class LegacyMetadata(object):
METADATA_FILENAME = 'pydist.json' METADATA_FILENAME = 'pydist.json'
WHEEL_METADATA_FILENAME = 'metadata.json' WHEEL_METADATA_FILENAME = 'metadata.json'
LEGACY_METADATA_FILENAME = 'METADATA'
class Metadata(object): class Metadata(object):

View file

@ -236,8 +236,10 @@ class ScriptMaker(object):
def _write_script(self, names, shebang, script_bytes, filenames, ext): def _write_script(self, names, shebang, script_bytes, filenames, ext):
use_launcher = self.add_launchers and self._is_nt use_launcher = self.add_launchers and self._is_nt
linesep = os.linesep.encode('utf-8') linesep = os.linesep.encode('utf-8')
if not shebang.endswith(linesep):
shebang += linesep
if not use_launcher: if not use_launcher:
script_bytes = shebang + linesep + script_bytes script_bytes = shebang + script_bytes
else: # pragma: no cover else: # pragma: no cover
if ext == 'py': if ext == 'py':
launcher = self._get_launcher('t') launcher = self._get_launcher('t')
@ -247,7 +249,7 @@ class ScriptMaker(object):
with ZipFile(stream, 'w') as zf: with ZipFile(stream, 'w') as zf:
zf.writestr('__main__.py', script_bytes) zf.writestr('__main__.py', script_bytes)
zip_data = stream.getvalue() zip_data = stream.getvalue()
script_bytes = launcher + shebang + linesep + zip_data script_bytes = launcher + shebang + zip_data
for name in names: for name in names:
outname = os.path.join(self.target_dir, name) outname = os.path.join(self.target_dir, name)
if use_launcher: # pragma: no cover if use_launcher: # pragma: no cover

View file

@ -545,16 +545,14 @@ class FileOperator(object):
def write_binary_file(self, path, data): def write_binary_file(self, path, data):
self.ensure_dir(os.path.dirname(path)) self.ensure_dir(os.path.dirname(path))
if not self.dry_run: if not self.dry_run:
if os.path.exists(path):
os.remove(path)
with open(path, 'wb') as f: with open(path, 'wb') as f:
f.write(data) f.write(data)
self.record_as_written(path) self.record_as_written(path)
def write_text_file(self, path, data, encoding): def write_text_file(self, path, data, encoding):
self.ensure_dir(os.path.dirname(path)) self.write_binary_file(path, data.encode(encoding))
if not self.dry_run:
with open(path, 'wb') as f:
f.write(data.encode(encoding))
self.record_as_written(path)
def set_mode(self, bits, mask, files): def set_mode(self, bits, mask, files):
if os.name == 'posix' or (os.name == 'java' and os._name == 'posix'): if os.name == 'posix' or (os.name == 'java' and os._name == 'posix'):
@ -582,7 +580,7 @@ class FileOperator(object):
if self.record: if self.record:
self.dirs_created.add(path) self.dirs_created.add(path)
def byte_compile(self, path, optimize=False, force=False, prefix=None): def byte_compile(self, path, optimize=False, force=False, prefix=None, hashed_invalidation=False):
dpath = cache_from_source(path, not optimize) dpath = cache_from_source(path, not optimize)
logger.info('Byte-compiling %s to %s', path, dpath) logger.info('Byte-compiling %s to %s', path, dpath)
if not self.dry_run: if not self.dry_run:
@ -592,7 +590,10 @@ class FileOperator(object):
else: else:
assert path.startswith(prefix) assert path.startswith(prefix)
diagpath = path[len(prefix):] diagpath = path[len(prefix):]
py_compile.compile(path, dpath, diagpath, True) # raise error compile_kwargs = {}
if hashed_invalidation and hasattr(py_compile, 'PycInvalidationMode'):
compile_kwargs['invalidation_mode'] = py_compile.PycInvalidationMode.CHECKED_HASH
py_compile.compile(path, dpath, diagpath, True, **compile_kwargs) # raise error
self.record_as_written(dpath) self.record_as_written(dpath)
return dpath return dpath

View file

@ -442,7 +442,9 @@ class Wheel(object):
This can be used to issue any warnings to raise any exceptions. This can be used to issue any warnings to raise any exceptions.
If kwarg ``lib_only`` is True, only the purelib/platlib files are If kwarg ``lib_only`` is True, only the purelib/platlib files are
installed, and the headers, scripts, data and dist-info metadata are installed, and the headers, scripts, data and dist-info metadata are
not written. not written. If kwarg ``bytecode_hashed_invalidation`` is True, written
bytecode will try to use file-hash based invalidation (PEP-552) on
supported interpreter versions (CPython 2.7+).
The return value is a :class:`InstalledDistribution` instance unless The return value is a :class:`InstalledDistribution` instance unless
``options.lib_only`` is True, in which case the return value is ``None``. ``options.lib_only`` is True, in which case the return value is ``None``.
@ -451,6 +453,7 @@ class Wheel(object):
dry_run = maker.dry_run dry_run = maker.dry_run
warner = kwargs.get('warner') warner = kwargs.get('warner')
lib_only = kwargs.get('lib_only', False) lib_only = kwargs.get('lib_only', False)
bc_hashed_invalidation = kwargs.get('bytecode_hashed_invalidation', False)
pathname = os.path.join(self.dirname, self.filename) pathname = os.path.join(self.dirname, self.filename)
name_ver = '%s-%s' % (self.name, self.version) name_ver = '%s-%s' % (self.name, self.version)
@ -557,7 +560,8 @@ class Wheel(object):
'%s' % outfile) '%s' % outfile)
if bc and outfile.endswith('.py'): if bc and outfile.endswith('.py'):
try: try:
pyc = fileop.byte_compile(outfile) pyc = fileop.byte_compile(outfile,
hashed_invalidation=bc_hashed_invalidation)
outfiles.append(pyc) outfiles.append(pyc)
except Exception: except Exception:
# Don't give up if byte-compilation fails, # Don't give up if byte-compilation fails,

2
src/pip/_vendor/idna/LICENSE.rst Normal file → Executable file
View file

@ -1,7 +1,7 @@
License License
------- -------
Copyright (c) 2013-2017, Kim Davies. All rights reserved. Copyright (c) 2013-2018, Kim Davies. All rights reserved.
Redistribution and use in source and binary forms, with or without Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met: modification, are permitted provided that the following conditions are met:

View file

@ -267,10 +267,7 @@ def alabel(label):
try: try:
label = label.encode('ascii') label = label.encode('ascii')
try: ulabel(label)
ulabel(label)
except IDNAError:
raise IDNAError('The label {0} is not a valid A-label'.format(label))
if not valid_label_length(label): if not valid_label_length(label):
raise IDNAError('Label too long') raise IDNAError('Label too long')
return label return label

View file

@ -1,6 +1,6 @@
# This file is automatically generated by tools/idna-data # This file is automatically generated by tools/idna-data
__version__ = "10.0.0" __version__ = "11.0.0"
scripts = { scripts = {
'Greek': ( 'Greek': (
0x37000000374, 0x37000000374,
@ -49,7 +49,7 @@ scripts = {
0x30210000302a, 0x30210000302a,
0x30380000303c, 0x30380000303c,
0x340000004db6, 0x340000004db6,
0x4e0000009feb, 0x4e0000009ff0,
0xf9000000fa6e, 0xf9000000fa6e,
0xfa700000fada, 0xfa700000fada,
0x200000002a6d7, 0x200000002a6d7,
@ -62,7 +62,7 @@ scripts = {
'Hebrew': ( 'Hebrew': (
0x591000005c8, 0x591000005c8,
0x5d0000005eb, 0x5d0000005eb,
0x5f0000005f5, 0x5ef000005f5,
0xfb1d0000fb37, 0xfb1d0000fb37,
0xfb380000fb3d, 0xfb380000fb3d,
0xfb3e0000fb3f, 0xfb3e0000fb3f,
@ -248,6 +248,7 @@ joining_types = {
0x6fb: 68, 0x6fb: 68,
0x6fc: 68, 0x6fc: 68,
0x6ff: 68, 0x6ff: 68,
0x70f: 84,
0x710: 82, 0x710: 82,
0x712: 68, 0x712: 68,
0x713: 68, 0x713: 68,
@ -522,6 +523,7 @@ joining_types = {
0x1875: 68, 0x1875: 68,
0x1876: 68, 0x1876: 68,
0x1877: 68, 0x1877: 68,
0x1878: 68,
0x1880: 85, 0x1880: 85,
0x1881: 85, 0x1881: 85,
0x1882: 85, 0x1882: 85,
@ -690,6 +692,70 @@ joining_types = {
0x10bad: 68, 0x10bad: 68,
0x10bae: 68, 0x10bae: 68,
0x10baf: 85, 0x10baf: 85,
0x10d00: 76,
0x10d01: 68,
0x10d02: 68,
0x10d03: 68,
0x10d04: 68,
0x10d05: 68,
0x10d06: 68,
0x10d07: 68,
0x10d08: 68,
0x10d09: 68,
0x10d0a: 68,
0x10d0b: 68,
0x10d0c: 68,
0x10d0d: 68,
0x10d0e: 68,
0x10d0f: 68,
0x10d10: 68,
0x10d11: 68,
0x10d12: 68,
0x10d13: 68,
0x10d14: 68,
0x10d15: 68,
0x10d16: 68,
0x10d17: 68,
0x10d18: 68,
0x10d19: 68,
0x10d1a: 68,
0x10d1b: 68,
0x10d1c: 68,
0x10d1d: 68,
0x10d1e: 68,
0x10d1f: 68,
0x10d20: 68,
0x10d21: 68,
0x10d22: 82,
0x10d23: 68,
0x10f30: 68,
0x10f31: 68,
0x10f32: 68,
0x10f33: 82,
0x10f34: 68,
0x10f35: 68,
0x10f36: 68,
0x10f37: 68,
0x10f38: 68,
0x10f39: 68,
0x10f3a: 68,
0x10f3b: 68,
0x10f3c: 68,
0x10f3d: 68,
0x10f3e: 68,
0x10f3f: 68,
0x10f40: 68,
0x10f41: 68,
0x10f42: 68,
0x10f43: 68,
0x10f44: 68,
0x10f45: 85,
0x10f51: 68,
0x10f52: 68,
0x10f53: 68,
0x10f54: 82,
0x110bd: 85,
0x110cd: 85,
0x1e900: 68, 0x1e900: 68,
0x1e901: 68, 0x1e901: 68,
0x1e902: 68, 0x1e902: 68,
@ -1034,14 +1100,15 @@ codepoint_classes = {
0x52d0000052e, 0x52d0000052e,
0x52f00000530, 0x52f00000530,
0x5590000055a, 0x5590000055a,
0x56100000587, 0x56000000587,
0x58800000589,
0x591000005be, 0x591000005be,
0x5bf000005c0, 0x5bf000005c0,
0x5c1000005c3, 0x5c1000005c3,
0x5c4000005c6, 0x5c4000005c6,
0x5c7000005c8, 0x5c7000005c8,
0x5d0000005eb, 0x5d0000005eb,
0x5f0000005f3, 0x5ef000005f3,
0x6100000061b, 0x6100000061b,
0x62000000640, 0x62000000640,
0x64100000660, 0x64100000660,
@ -1054,12 +1121,13 @@ codepoint_classes = {
0x7100000074b, 0x7100000074b,
0x74d000007b2, 0x74d000007b2,
0x7c0000007f6, 0x7c0000007f6,
0x7fd000007fe,
0x8000000082e, 0x8000000082e,
0x8400000085c, 0x8400000085c,
0x8600000086b, 0x8600000086b,
0x8a0000008b5, 0x8a0000008b5,
0x8b6000008be, 0x8b6000008be,
0x8d4000008e2, 0x8d3000008e2,
0x8e300000958, 0x8e300000958,
0x96000000964, 0x96000000964,
0x96600000970, 0x96600000970,
@ -1077,6 +1145,7 @@ codepoint_classes = {
0x9e0000009e4, 0x9e0000009e4,
0x9e6000009f2, 0x9e6000009f2,
0x9fc000009fd, 0x9fc000009fd,
0x9fe000009ff,
0xa0100000a04, 0xa0100000a04,
0xa0500000a0b, 0xa0500000a0b,
0xa0f00000a11, 0xa0f00000a11,
@ -1136,8 +1205,7 @@ codepoint_classes = {
0xbd000000bd1, 0xbd000000bd1,
0xbd700000bd8, 0xbd700000bd8,
0xbe600000bf0, 0xbe600000bf0,
0xc0000000c04, 0xc0000000c0d,
0xc0500000c0d,
0xc0e00000c11, 0xc0e00000c11,
0xc1200000c29, 0xc1200000c29,
0xc2a00000c3a, 0xc2a00000c3a,
@ -1276,7 +1344,7 @@ codepoint_classes = {
0x17dc000017de, 0x17dc000017de,
0x17e0000017ea, 0x17e0000017ea,
0x18100000181a, 0x18100000181a,
0x182000001878, 0x182000001879,
0x1880000018ab, 0x1880000018ab,
0x18b0000018f6, 0x18b0000018f6,
0x19000000191f, 0x19000000191f,
@ -1544,11 +1612,11 @@ codepoint_classes = {
0x309d0000309f, 0x309d0000309f,
0x30a1000030fb, 0x30a1000030fb,
0x30fc000030ff, 0x30fc000030ff,
0x31050000312f, 0x310500003130,
0x31a0000031bb, 0x31a0000031bb,
0x31f000003200, 0x31f000003200,
0x340000004db6, 0x340000004db6,
0x4e0000009feb, 0x4e0000009ff0,
0xa0000000a48d, 0xa0000000a48d,
0xa4d00000a4fe, 0xa4d00000a4fe,
0xa5000000a60d, 0xa5000000a60d,
@ -1655,8 +1723,10 @@ codepoint_classes = {
0xa7a50000a7a6, 0xa7a50000a7a6,
0xa7a70000a7a8, 0xa7a70000a7a8,
0xa7a90000a7aa, 0xa7a90000a7aa,
0xa7af0000a7b0,
0xa7b50000a7b6, 0xa7b50000a7b6,
0xa7b70000a7b8, 0xa7b70000a7b8,
0xa7b90000a7ba,
0xa7f70000a7f8, 0xa7f70000a7f8,
0xa7fa0000a828, 0xa7fa0000a828,
0xa8400000a874, 0xa8400000a874,
@ -1664,8 +1734,7 @@ codepoint_classes = {
0xa8d00000a8da, 0xa8d00000a8da,
0xa8e00000a8f8, 0xa8e00000a8f8,
0xa8fb0000a8fc, 0xa8fb0000a8fc,
0xa8fd0000a8fe, 0xa8fd0000a92e,
0xa9000000a92e,
0xa9300000a954, 0xa9300000a954,
0xa9800000a9c1, 0xa9800000a9c1,
0xa9cf0000a9da, 0xa9cf0000a9da,
@ -1743,7 +1812,7 @@ codepoint_classes = {
0x10a0500010a07, 0x10a0500010a07,
0x10a0c00010a14, 0x10a0c00010a14,
0x10a1500010a18, 0x10a1500010a18,
0x10a1900010a34, 0x10a1900010a36,
0x10a3800010a3b, 0x10a3800010a3b,
0x10a3f00010a40, 0x10a3f00010a40,
0x10a6000010a7d, 0x10a6000010a7d,
@ -1756,6 +1825,11 @@ codepoint_classes = {
0x10b8000010b92, 0x10b8000010b92,
0x10c0000010c49, 0x10c0000010c49,
0x10cc000010cf3, 0x10cc000010cf3,
0x10d0000010d28,
0x10d3000010d3a,
0x10f0000010f1d,
0x10f2700010f28,
0x10f3000010f51,
0x1100000011047, 0x1100000011047,
0x1106600011070, 0x1106600011070,
0x1107f000110bb, 0x1107f000110bb,
@ -1763,10 +1837,11 @@ codepoint_classes = {
0x110f0000110fa, 0x110f0000110fa,
0x1110000011135, 0x1110000011135,
0x1113600011140, 0x1113600011140,
0x1114400011147,
0x1115000011174, 0x1115000011174,
0x1117600011177, 0x1117600011177,
0x11180000111c5, 0x11180000111c5,
0x111ca000111cd, 0x111c9000111cd,
0x111d0000111db, 0x111d0000111db,
0x111dc000111dd, 0x111dc000111dd,
0x1120000011212, 0x1120000011212,
@ -1786,7 +1861,7 @@ codepoint_classes = {
0x1132a00011331, 0x1132a00011331,
0x1133200011334, 0x1133200011334,
0x113350001133a, 0x113350001133a,
0x1133c00011345, 0x1133b00011345,
0x1134700011349, 0x1134700011349,
0x1134b0001134e, 0x1134b0001134e,
0x1135000011351, 0x1135000011351,
@ -1796,6 +1871,7 @@ codepoint_classes = {
0x1137000011375, 0x1137000011375,
0x114000001144b, 0x114000001144b,
0x114500001145a, 0x114500001145a,
0x1145e0001145f,
0x11480000114c6, 0x11480000114c6,
0x114c7000114c8, 0x114c7000114c8,
0x114d0000114da, 0x114d0000114da,
@ -1807,15 +1883,17 @@ codepoint_classes = {
0x116500001165a, 0x116500001165a,
0x11680000116b8, 0x11680000116b8,
0x116c0000116ca, 0x116c0000116ca,
0x117000001171a, 0x117000001171b,
0x1171d0001172c, 0x1171d0001172c,
0x117300001173a, 0x117300001173a,
0x118000001183b,
0x118c0000118ea, 0x118c0000118ea,
0x118ff00011900, 0x118ff00011900,
0x11a0000011a3f, 0x11a0000011a3f,
0x11a4700011a48, 0x11a4700011a48,
0x11a5000011a84, 0x11a5000011a84,
0x11a8600011a9a, 0x11a8600011a9a,
0x11a9d00011a9e,
0x11ac000011af9, 0x11ac000011af9,
0x11c0000011c09, 0x11c0000011c09,
0x11c0a00011c37, 0x11c0a00011c37,
@ -1831,6 +1909,13 @@ codepoint_classes = {
0x11d3c00011d3e, 0x11d3c00011d3e,
0x11d3f00011d48, 0x11d3f00011d48,
0x11d5000011d5a, 0x11d5000011d5a,
0x11d6000011d66,
0x11d6700011d69,
0x11d6a00011d8f,
0x11d9000011d92,
0x11d9300011d99,
0x11da000011daa,
0x11ee000011ef7,
0x120000001239a, 0x120000001239a,
0x1248000012544, 0x1248000012544,
0x130000001342f, 0x130000001342f,
@ -1845,11 +1930,12 @@ codepoint_classes = {
0x16b5000016b5a, 0x16b5000016b5a,
0x16b6300016b78, 0x16b6300016b78,
0x16b7d00016b90, 0x16b7d00016b90,
0x16e6000016e80,
0x16f0000016f45, 0x16f0000016f45,
0x16f5000016f7f, 0x16f5000016f7f,
0x16f8f00016fa0, 0x16f8f00016fa0,
0x16fe000016fe2, 0x16fe000016fe2,
0x17000000187ed, 0x17000000187f2,
0x1880000018af3, 0x1880000018af3,
0x1b0000001b11f, 0x1b0000001b11f,
0x1b1700001b2fc, 0x1b1700001b2fc,

View file

@ -1,2 +1,2 @@
__version__ = '2.7' __version__ = '2.8'

File diff suppressed because it is too large Load diff

View file

@ -4,18 +4,24 @@
from __future__ import absolute_import, division, print_function from __future__ import absolute_import, division, print_function
__all__ = [ __all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__", "__title__",
"__email__", "__license__", "__copyright__", "__summary__",
"__uri__",
"__version__",
"__author__",
"__email__",
"__license__",
"__copyright__",
] ]
__title__ = "packaging" __title__ = "packaging"
__summary__ = "Core utilities for Python packages" __summary__ = "Core utilities for Python packages"
__uri__ = "https://github.com/pypa/packaging" __uri__ = "https://github.com/pypa/packaging"
__version__ = "18.0" __version__ = "19.0"
__author__ = "Donald Stufft and individual contributors" __author__ = "Donald Stufft and individual contributors"
__email__ = "donald@stufft.io" __email__ = "donald@stufft.io"
__license__ = "BSD or Apache License, Version 2.0" __license__ = "BSD or Apache License, Version 2.0"
__copyright__ = "Copyright 2014-2018 %s" % __author__ __copyright__ = "Copyright 2014-2019 %s" % __author__

View file

@ -4,11 +4,23 @@
from __future__ import absolute_import, division, print_function from __future__ import absolute_import, division, print_function
from .__about__ import ( from .__about__ import (
__author__, __copyright__, __email__, __license__, __summary__, __title__, __author__,
__uri__, __version__ __copyright__,
__email__,
__license__,
__summary__,
__title__,
__uri__,
__version__,
) )
__all__ = [ __all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__", "__title__",
"__email__", "__license__", "__copyright__", "__summary__",
"__uri__",
"__version__",
"__author__",
"__email__",
"__license__",
"__copyright__",
] ]

View file

@ -12,9 +12,9 @@ PY3 = sys.version_info[0] == 3
# flake8: noqa # flake8: noqa
if PY3: if PY3:
string_types = str, string_types = (str,)
else: else:
string_types = basestring, string_types = (basestring,)
def with_metaclass(meta, *bases): def with_metaclass(meta, *bases):
@ -27,4 +27,5 @@ def with_metaclass(meta, *bases):
class metaclass(meta): class metaclass(meta):
def __new__(cls, name, this_bases, d): def __new__(cls, name, this_bases, d):
return meta(name, bases, d) return meta(name, bases, d)
return type.__new__(metaclass, 'temporary_class', (), {})
return type.__new__(metaclass, "temporary_class", (), {})

View file

@ -5,7 +5,6 @@ from __future__ import absolute_import, division, print_function
class Infinity(object): class Infinity(object):
def __repr__(self): def __repr__(self):
return "Infinity" return "Infinity"
@ -38,7 +37,6 @@ Infinity = Infinity()
class NegativeInfinity(object): class NegativeInfinity(object):
def __repr__(self): def __repr__(self):
return "-Infinity" return "-Infinity"

View file

@ -17,8 +17,11 @@ from .specifiers import Specifier, InvalidSpecifier
__all__ = [ __all__ = [
"InvalidMarker", "UndefinedComparison", "UndefinedEnvironmentName", "InvalidMarker",
"Marker", "default_environment", "UndefinedComparison",
"UndefinedEnvironmentName",
"Marker",
"default_environment",
] ]
@ -42,7 +45,6 @@ class UndefinedEnvironmentName(ValueError):
class Node(object): class Node(object):
def __init__(self, value): def __init__(self, value):
self.value = value self.value = value
@ -57,62 +59,52 @@ class Node(object):
class Variable(Node): class Variable(Node):
def serialize(self): def serialize(self):
return str(self) return str(self)
class Value(Node): class Value(Node):
def serialize(self): def serialize(self):
return '"{0}"'.format(self) return '"{0}"'.format(self)
class Op(Node): class Op(Node):
def serialize(self): def serialize(self):
return str(self) return str(self)
VARIABLE = ( VARIABLE = (
L("implementation_version") | L("implementation_version")
L("platform_python_implementation") | | L("platform_python_implementation")
L("implementation_name") | | L("implementation_name")
L("python_full_version") | | L("python_full_version")
L("platform_release") | | L("platform_release")
L("platform_version") | | L("platform_version")
L("platform_machine") | | L("platform_machine")
L("platform_system") | | L("platform_system")
L("python_version") | | L("python_version")
L("sys_platform") | | L("sys_platform")
L("os_name") | | L("os_name")
L("os.name") | # PEP-345 | L("os.name")
L("sys.platform") | # PEP-345 | L("sys.platform") # PEP-345
L("platform.version") | # PEP-345 | L("platform.version") # PEP-345
L("platform.machine") | # PEP-345 | L("platform.machine") # PEP-345
L("platform.python_implementation") | # PEP-345 | L("platform.python_implementation") # PEP-345
L("python_implementation") | # undocumented setuptools legacy | L("python_implementation") # PEP-345
L("extra") | L("extra") # undocumented setuptools legacy
) )
ALIASES = { ALIASES = {
'os.name': 'os_name', "os.name": "os_name",
'sys.platform': 'sys_platform', "sys.platform": "sys_platform",
'platform.version': 'platform_version', "platform.version": "platform_version",
'platform.machine': 'platform_machine', "platform.machine": "platform_machine",
'platform.python_implementation': 'platform_python_implementation', "platform.python_implementation": "platform_python_implementation",
'python_implementation': 'platform_python_implementation' "python_implementation": "platform_python_implementation",
} }
VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0]))) VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0])))
VERSION_CMP = ( VERSION_CMP = (
L("===") | L("===") | L("==") | L(">=") | L("<=") | L("!=") | L("~=") | L(">") | L("<")
L("==") |
L(">=") |
L("<=") |
L("!=") |
L("~=") |
L(">") |
L("<")
) )
MARKER_OP = VERSION_CMP | L("not in") | L("in") MARKER_OP = VERSION_CMP | L("not in") | L("in")
@ -152,8 +144,11 @@ def _format_marker(marker, first=True):
# where the single item is itself its own list. In that case we want to skip # where the single item is itself its own list. In that case we want to skip
# the rest of this function so that we don't get extraneous () on the # the rest of this function so that we don't get extraneous () on the
# outside. # outside.
if (isinstance(marker, list) and len(marker) == 1 and if (
isinstance(marker[0], (list, tuple))): isinstance(marker, list)
and len(marker) == 1
and isinstance(marker[0], (list, tuple))
):
return _format_marker(marker[0]) return _format_marker(marker[0])
if isinstance(marker, list): if isinstance(marker, list):
@ -239,20 +234,20 @@ def _evaluate_markers(markers, environment):
def format_full_version(info): def format_full_version(info):
version = '{0.major}.{0.minor}.{0.micro}'.format(info) version = "{0.major}.{0.minor}.{0.micro}".format(info)
kind = info.releaselevel kind = info.releaselevel
if kind != 'final': if kind != "final":
version += kind[0] + str(info.serial) version += kind[0] + str(info.serial)
return version return version
def default_environment(): def default_environment():
if hasattr(sys, 'implementation'): if hasattr(sys, "implementation"):
iver = format_full_version(sys.implementation.version) iver = format_full_version(sys.implementation.version)
implementation_name = sys.implementation.name implementation_name = sys.implementation.name
else: else:
iver = '0' iver = "0"
implementation_name = '' implementation_name = ""
return { return {
"implementation_name": implementation_name, "implementation_name": implementation_name,
@ -270,13 +265,13 @@ def default_environment():
class Marker(object): class Marker(object):
def __init__(self, marker): def __init__(self, marker):
try: try:
self._markers = _coerce_parse_result(MARKER.parseString(marker)) self._markers = _coerce_parse_result(MARKER.parseString(marker))
except ParseException as e: except ParseException as e:
err_str = "Invalid marker: {0!r}, parse error at {1!r}".format( err_str = "Invalid marker: {0!r}, parse error at {1!r}".format(
marker, marker[e.loc:e.loc + 8]) marker, marker[e.loc : e.loc + 8]
)
raise InvalidMarker(err_str) raise InvalidMarker(err_str)
def __str__(self): def __str__(self):

View file

@ -38,8 +38,8 @@ IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END))
NAME = IDENTIFIER("name") NAME = IDENTIFIER("name")
EXTRA = IDENTIFIER EXTRA = IDENTIFIER
URI = Regex(r'[^ ]+')("url") URI = Regex(r"[^ ]+")("url")
URL = (AT + URI) URL = AT + URI
EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA) EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA)
EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras") EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras")
@ -48,17 +48,18 @@ VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE)
VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE) VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE)
VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY
VERSION_MANY = Combine(VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), VERSION_MANY = Combine(
joinString=",", adjacent=False)("_raw_spec") VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), joinString=",", adjacent=False
)("_raw_spec")
_VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY)) _VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY))
_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or '') _VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or "")
VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier") VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier")
VERSION_SPEC.setParseAction(lambda s, l, t: t[1]) VERSION_SPEC.setParseAction(lambda s, l, t: t[1])
MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker") MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker")
MARKER_EXPR.setParseAction( MARKER_EXPR.setParseAction(
lambda s, l, t: Marker(s[t._original_start:t._original_end]) lambda s, l, t: Marker(s[t._original_start : t._original_end])
) )
MARKER_SEPARATOR = SEMICOLON MARKER_SEPARATOR = SEMICOLON
MARKER = MARKER_SEPARATOR + MARKER_EXPR MARKER = MARKER_SEPARATOR + MARKER_EXPR
@ -66,8 +67,7 @@ MARKER = MARKER_SEPARATOR + MARKER_EXPR
VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER) VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER)
URL_AND_MARKER = URL + Optional(MARKER) URL_AND_MARKER = URL + Optional(MARKER)
NAMED_REQUIREMENT = \ NAMED_REQUIREMENT = NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)
NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)
REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd
# pyparsing isn't thread safe during initialization, so we do it eagerly, see # pyparsing isn't thread safe during initialization, so we do it eagerly, see
@ -92,15 +92,21 @@ class Requirement(object):
try: try:
req = REQUIREMENT.parseString(requirement_string) req = REQUIREMENT.parseString(requirement_string)
except ParseException as e: except ParseException as e:
raise InvalidRequirement("Parse error at \"{0!r}\": {1}".format( raise InvalidRequirement(
requirement_string[e.loc:e.loc + 8], e.msg 'Parse error at "{0!r}": {1}'.format(
)) requirement_string[e.loc : e.loc + 8], e.msg
)
)
self.name = req.name self.name = req.name
if req.url: if req.url:
parsed_url = urlparse.urlparse(req.url) parsed_url = urlparse.urlparse(req.url)
if not (parsed_url.scheme and parsed_url.netloc) or ( if parsed_url.scheme == "file":
not parsed_url.scheme and not parsed_url.netloc): if urlparse.urlunparse(parsed_url) != req.url:
raise InvalidRequirement("Invalid URL given")
elif not (parsed_url.scheme and parsed_url.netloc) or (
not parsed_url.scheme and not parsed_url.netloc
):
raise InvalidRequirement("Invalid URL: {0}".format(req.url)) raise InvalidRequirement("Invalid URL: {0}".format(req.url))
self.url = req.url self.url = req.url
else: else:
@ -120,6 +126,8 @@ class Requirement(object):
if self.url: if self.url:
parts.append("@ {0}".format(self.url)) parts.append("@ {0}".format(self.url))
if self.marker:
parts.append(" ")
if self.marker: if self.marker:
parts.append("; {0}".format(self.marker)) parts.append("; {0}".format(self.marker))

View file

@ -19,7 +19,6 @@ class InvalidSpecifier(ValueError):
class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):
@abc.abstractmethod @abc.abstractmethod
def __str__(self): def __str__(self):
""" """
@ -84,10 +83,7 @@ class _IndividualSpecifier(BaseSpecifier):
if not match: if not match:
raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec)) raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec))
self._spec = ( self._spec = (match.group("operator").strip(), match.group("version").strip())
match.group("operator").strip(),
match.group("version").strip(),
)
# Store whether or not this Specifier should accept prereleases # Store whether or not this Specifier should accept prereleases
self._prereleases = prereleases self._prereleases = prereleases
@ -99,11 +95,7 @@ class _IndividualSpecifier(BaseSpecifier):
else "" else ""
) )
return "<{0}({1!r}{2})>".format( return "<{0}({1!r}{2})>".format(self.__class__.__name__, str(self), pre)
self.__class__.__name__,
str(self),
pre,
)
def __str__(self): def __str__(self):
return "{0}{1}".format(*self._spec) return "{0}{1}".format(*self._spec)
@ -194,8 +186,9 @@ class _IndividualSpecifier(BaseSpecifier):
# If our version is a prerelease, and we were not set to allow # If our version is a prerelease, and we were not set to allow
# prereleases, then we'll store it for later in case nothing # prereleases, then we'll store it for later in case nothing
# else matches this specifier. # else matches this specifier.
if (parsed_version.is_prerelease and not if parsed_version.is_prerelease and not (
(prereleases or self.prereleases)): prereleases or self.prereleases
):
found_prereleases.append(version) found_prereleases.append(version)
# Either this is not a prerelease, or we should have been # Either this is not a prerelease, or we should have been
# accepting prereleases from the beginning. # accepting prereleases from the beginning.
@ -213,8 +206,7 @@ class _IndividualSpecifier(BaseSpecifier):
class LegacySpecifier(_IndividualSpecifier): class LegacySpecifier(_IndividualSpecifier):
_regex_str = ( _regex_str = r"""
r"""
(?P<operator>(==|!=|<=|>=|<|>)) (?P<operator>(==|!=|<=|>=|<|>))
\s* \s*
(?P<version> (?P<version>
@ -225,10 +217,8 @@ class LegacySpecifier(_IndividualSpecifier):
# them, and a comma since it's a version separator. # them, and a comma since it's a version separator.
) )
""" """
)
_regex = re.compile( _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
_operators = { _operators = {
"==": "equal", "==": "equal",
@ -269,13 +259,13 @@ def _require_version_compare(fn):
if not isinstance(prospective, Version): if not isinstance(prospective, Version):
return False return False
return fn(self, prospective, spec) return fn(self, prospective, spec)
return wrapped return wrapped
class Specifier(_IndividualSpecifier): class Specifier(_IndividualSpecifier):
_regex_str = ( _regex_str = r"""
r"""
(?P<operator>(~=|==|!=|<=|>=|<|>|===)) (?P<operator>(~=|==|!=|<=|>=|<|>|===))
(?P<version> (?P<version>
(?: (?:
@ -367,10 +357,8 @@ class Specifier(_IndividualSpecifier):
) )
) )
""" """
)
_regex = re.compile( _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
_operators = { _operators = {
"~=": "compatible", "~=": "compatible",
@ -397,8 +385,7 @@ class Specifier(_IndividualSpecifier):
prefix = ".".join( prefix = ".".join(
list( list(
itertools.takewhile( itertools.takewhile(
lambda x: (not x.startswith("post") and not lambda x: (not x.startswith("post") and not x.startswith("dev")),
x.startswith("dev")),
_version_split(spec), _version_split(spec),
) )
)[:-1] )[:-1]
@ -407,8 +394,9 @@ class Specifier(_IndividualSpecifier):
# Add the prefix notation to the end of our string # Add the prefix notation to the end of our string
prefix += ".*" prefix += ".*"
return (self._get_operator(">=")(prospective, spec) and return self._get_operator(">=")(prospective, spec) and self._get_operator("==")(
self._get_operator("==")(prospective, prefix)) prospective, prefix
)
@_require_version_compare @_require_version_compare
def _compare_equal(self, prospective, spec): def _compare_equal(self, prospective, spec):
@ -428,7 +416,7 @@ class Specifier(_IndividualSpecifier):
# Shorten the prospective version to be the same length as the spec # Shorten the prospective version to be the same length as the spec
# so that we can determine if the specifier is a prefix of the # so that we can determine if the specifier is a prefix of the
# prospective version or not. # prospective version or not.
prospective = prospective[:len(spec)] prospective = prospective[: len(spec)]
# Pad out our two sides with zeros so that they both equal the same # Pad out our two sides with zeros so that they both equal the same
# length. # length.
@ -567,27 +555,17 @@ def _pad_version(left, right):
right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right))) right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
# Get the rest of our versions # Get the rest of our versions
left_split.append(left[len(left_split[0]):]) left_split.append(left[len(left_split[0]) :])
right_split.append(right[len(right_split[0]):]) right_split.append(right[len(right_split[0]) :])
# Insert our padding # Insert our padding
left_split.insert( left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
1, right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))
["0"] * max(0, len(right_split[0]) - len(left_split[0])),
)
right_split.insert(
1,
["0"] * max(0, len(left_split[0]) - len(right_split[0])),
)
return ( return (list(itertools.chain(*left_split)), list(itertools.chain(*right_split)))
list(itertools.chain(*left_split)),
list(itertools.chain(*right_split)),
)
class SpecifierSet(BaseSpecifier): class SpecifierSet(BaseSpecifier):
def __init__(self, specifiers="", prereleases=None): def __init__(self, specifiers="", prereleases=None):
# Split on , to break each individual specifier into its own item, and # Split on , to break each individual specifier into its own item, and
# strip each item to remove leading/trailing whitespace. # strip each item to remove leading/trailing whitespace.
@ -721,10 +699,7 @@ class SpecifierSet(BaseSpecifier):
# given version is contained within all of them. # given version is contained within all of them.
# Note: This use of all() here means that an empty set of specifiers # Note: This use of all() here means that an empty set of specifiers
# will always return True, this is an explicit design decision. # will always return True, this is an explicit design decision.
return all( return all(s.contains(item, prereleases=prereleases) for s in self._specs)
s.contains(item, prereleases=prereleases)
for s in self._specs
)
def filter(self, iterable, prereleases=None): def filter(self, iterable, prereleases=None):
# Determine if we're forcing a prerelease or not, if we're not forcing # Determine if we're forcing a prerelease or not, if we're not forcing

Some files were not shown because too many files have changed in this diff Show more