mirror of
https://github.com/pypa/pip
synced 2023-12-13 21:30:23 +01:00
Switch to using the stdlib logger instead of pip.log
This commit is contained in:
parent
83cdcc1f40
commit
767d11e49c
|
@ -8,8 +8,7 @@ from docutils.statemachine import ViewList
|
||||||
from textwrap import dedent
|
from textwrap import dedent
|
||||||
from pip import commands
|
from pip import commands
|
||||||
from pip import cmdoptions
|
from pip import cmdoptions
|
||||||
from pip.locations import default_log_file
|
from pip.utils import get_prog
|
||||||
from pip.util import get_prog
|
|
||||||
|
|
||||||
|
|
||||||
class PipCommandUsage(rst.Directive):
|
class PipCommandUsage(rst.Directive):
|
||||||
|
@ -58,10 +57,6 @@ class PipOptions(rst.Directive):
|
||||||
opt_help = option.help.replace('%default', str(option.default))
|
opt_help = option.help.replace('%default', str(option.default))
|
||||||
# fix paths with sys.prefix
|
# fix paths with sys.prefix
|
||||||
opt_help = opt_help.replace(sys.prefix, "<sys.prefix>")
|
opt_help = opt_help.replace(sys.prefix, "<sys.prefix>")
|
||||||
opt_help = opt_help.replace(
|
|
||||||
default_log_file,
|
|
||||||
"<see :ref:`FileLogging`>",
|
|
||||||
)
|
|
||||||
return [bookmark_line, "", line, "", " %s" % opt_help, ""]
|
return [bookmark_line, "", line, "", " %s" % opt_help, ""]
|
||||||
|
|
||||||
def _format_options(self, options, cmd_name=None):
|
def _format_options(self, options, cmd_name=None):
|
||||||
|
|
|
@ -24,26 +24,7 @@ Console logging
|
||||||
~~~~~~~~~~~~~~~
|
~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
pip offers :ref:`-v, --verbose <--verbose>` and :ref:`-q, --quiet <--quiet>`
|
pip offers :ref:`-v, --verbose <--verbose>` and :ref:`-q, --quiet <--quiet>`
|
||||||
to control the console log level. Each option can be used multiple times and
|
to control the console log level.
|
||||||
used together. One ``-v`` increases the verbosity by one, whereas one ``-q`` decreases it by
|
|
||||||
one.
|
|
||||||
|
|
||||||
The series of log levels, in order, are as follows::
|
|
||||||
|
|
||||||
VERBOSE_DEBUG, DEBUG, INFO, NOTIFY, WARN, ERROR, FATAL
|
|
||||||
|
|
||||||
``NOTIFY`` is the default level.
|
|
||||||
|
|
||||||
A few examples on how the parameters work to affect the level:
|
|
||||||
|
|
||||||
* specifying nothing results in ``NOTIFY``
|
|
||||||
* ``-v`` results in ``INFO``
|
|
||||||
* ``-vv`` results in ``DEBUG``
|
|
||||||
* ``-q`` results in ``WARN``
|
|
||||||
* ``-vq`` results in ``NOTIFY``
|
|
||||||
|
|
||||||
The most practical use case for users is either ``-v`` or ``-vv`` to see
|
|
||||||
additional logging to help troubleshoot an issue.
|
|
||||||
|
|
||||||
|
|
||||||
.. _`FileLogging`:
|
.. _`FileLogging`:
|
||||||
|
@ -55,20 +36,16 @@ pip offers the :ref:`--log <--log>` option for specifying a file where a maximum
|
||||||
verbosity log will be kept. This option is empty by default. This log appends
|
verbosity log will be kept. This option is empty by default. This log appends
|
||||||
to previous logging.
|
to previous logging.
|
||||||
|
|
||||||
Additionally, when commands fail (i.e. return a non-zero exit code), pip writes
|
Additionally, pip writes a "debug log" for every command. This log appends and
|
||||||
a "failure log" for the failed command. This log overwrites previous
|
will periodically rotate and clean itself up to limit on disk file size. The
|
||||||
logging. The default location is as follows:
|
default location is as follows:
|
||||||
|
|
||||||
* On Unix and Mac OS X: :file:`$HOME/.pip/pip.log`
|
* On Unix: :file:`$HOME/.cache/pip/log/debug.log`
|
||||||
* On Windows, the configuration file is: :file:`%HOME%\\pip\\pip.log`
|
* On Mac OS X: :file:`$HOME/Library/Logs/pip/debug.log`
|
||||||
|
* On Windows: :file:`C:\\Users\<username>\AppData\Local\pip\Logs\debug.log`
|
||||||
|
|
||||||
The option for the failure log, is :ref:`--log-file <--log-file>`.
|
Like all pip options, ``--log`` can also be set as an environment variable, or
|
||||||
|
placed into the pip config file. See the :ref:`Configuration` section.
|
||||||
Both logs add a line per execution to specify the date and what pip executable wrote the log.
|
|
||||||
|
|
||||||
Like all pip options, ``--log`` and ``log-file``, can also be set as an environment
|
|
||||||
variable, or placed into the pip config file. See the :ref:`Configuration`
|
|
||||||
section.
|
|
||||||
|
|
||||||
.. _`exists-action`:
|
.. _`exists-action`:
|
||||||
|
|
||||||
|
|
|
@ -1,13 +1,17 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
|
import logging
|
||||||
import os
|
import os
|
||||||
import optparse
|
import optparse
|
||||||
|
import warnings
|
||||||
|
|
||||||
import sys
|
import sys
|
||||||
import re
|
import re
|
||||||
|
|
||||||
from pip.exceptions import InstallationError, CommandError, PipError
|
from pip.exceptions import InstallationError, CommandError, PipError
|
||||||
from pip.log import logger
|
from pip.utils import get_installed_distributions, get_prog
|
||||||
from pip.util import get_installed_distributions, get_prog
|
from pip.utils import deprecation
|
||||||
from pip.vcs import git, mercurial, subversion, bazaar # noqa
|
from pip.vcs import git, mercurial, subversion, bazaar # noqa
|
||||||
from pip.baseparser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
|
from pip.baseparser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
|
||||||
from pip.commands import commands, get_summaries, get_similar_commands
|
from pip.commands import commands, get_summaries, get_similar_commands
|
||||||
|
@ -22,6 +26,9 @@ cmdoptions = pip.cmdoptions
|
||||||
__version__ = "1.6.dev1"
|
__version__ = "1.6.dev1"
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
def autocomplete():
|
def autocomplete():
|
||||||
"""Command and option completion for the main option parser (and options)
|
"""Command and option completion for the main option parser (and options)
|
||||||
and its subcommands (and options).
|
and its subcommands (and options).
|
||||||
|
@ -171,6 +178,13 @@ def main(args=None):
|
||||||
if args is None:
|
if args is None:
|
||||||
args = sys.argv[1:]
|
args = sys.argv[1:]
|
||||||
|
|
||||||
|
# Enable our Deprecation Warnings
|
||||||
|
for deprecation_warning in deprecation.DEPRECATIONS:
|
||||||
|
warnings.simplefilter("default", deprecation_warning)
|
||||||
|
|
||||||
|
# Configure our deprecation warnings to be sent through loggers
|
||||||
|
deprecation.install_warning_logger()
|
||||||
|
|
||||||
autocomplete()
|
autocomplete()
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
@ -208,14 +222,14 @@ class FrozenRequirement(object):
|
||||||
try:
|
try:
|
||||||
req = get_src_requirement(dist, location, find_tags)
|
req = get_src_requirement(dist, location, find_tags)
|
||||||
except InstallationError as exc:
|
except InstallationError as exc:
|
||||||
logger.warn(
|
logger.warning(
|
||||||
"Error when trying to get requirement for VCS system %s, "
|
"Error when trying to get requirement for VCS system %s, "
|
||||||
"falling back to uneditable format" % exc
|
"falling back to uneditable format", exc
|
||||||
)
|
)
|
||||||
req = None
|
req = None
|
||||||
if req is None:
|
if req is None:
|
||||||
logger.warn(
|
logger.warning(
|
||||||
'Could not determine repository location of %s' % location
|
'Could not determine repository location of %s', location
|
||||||
)
|
)
|
||||||
comments.append(
|
comments.append(
|
||||||
'## !! Could not determine repository location'
|
'## !! Could not determine repository location'
|
||||||
|
@ -238,8 +252,8 @@ class FrozenRequirement(object):
|
||||||
dependency_links,
|
dependency_links,
|
||||||
)
|
)
|
||||||
if not svn_location:
|
if not svn_location:
|
||||||
logger.warn(
|
logger.warning(
|
||||||
'Warning: cannot find svn location for %s' % req)
|
'Warning: cannot find svn location for %s', req)
|
||||||
comments.append(
|
comments.append(
|
||||||
'## FIXME: could not find svn URL in dependency_links '
|
'## FIXME: could not find svn URL in dependency_links '
|
||||||
'for this package:'
|
'for this package:'
|
||||||
|
|
|
@ -1,3 +1,5 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
# If we are running from a wheel, add the wheel to sys.path
|
# If we are running from a wheel, add the wheel to sys.path
|
||||||
|
|
|
@ -2,6 +2,8 @@
|
||||||
This code was taken from https://github.com/ActiveState/appdirs and modified
|
This code was taken from https://github.com/ActiveState/appdirs and modified
|
||||||
to suite our purposes.
|
to suite our purposes.
|
||||||
"""
|
"""
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
|
@ -52,6 +54,82 @@ def user_cache_dir(appname):
|
||||||
return path
|
return path
|
||||||
|
|
||||||
|
|
||||||
|
def user_data_dir(appname, roaming=False):
|
||||||
|
"""
|
||||||
|
Return full path to the user-specific data dir for this application.
|
||||||
|
|
||||||
|
"appname" is the name of application.
|
||||||
|
If None, just the system directory is returned.
|
||||||
|
"roaming" (boolean, default False) can be set True to use the Windows
|
||||||
|
roaming appdata directory. That means that for users on a Windows
|
||||||
|
network setup for roaming profiles, this user data will be
|
||||||
|
sync'd on login. See
|
||||||
|
<http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
|
||||||
|
for a discussion of issues.
|
||||||
|
|
||||||
|
Typical user data directories are:
|
||||||
|
Mac OS X: ~/Library/Application Support/<AppName>
|
||||||
|
Unix: ~/.local/share/<AppName> # or in
|
||||||
|
$XDG_DATA_HOME, if defined
|
||||||
|
Win XP (not roaming): C:\Documents and Settings\<username>\ ...
|
||||||
|
...Application Data\<AppName>
|
||||||
|
Win XP (roaming): C:\Documents and Settings\<username>\Local ...
|
||||||
|
...Settings\Application Data\<AppName>
|
||||||
|
Win 7 (not roaming): C:\\Users\<username>\AppData\Local\<AppName>
|
||||||
|
Win 7 (roaming): C:\\Users\<username>\AppData\Roaming\<AppName>
|
||||||
|
|
||||||
|
For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
|
||||||
|
That means, by default "~/.local/share/<AppName>".
|
||||||
|
"""
|
||||||
|
if WINDOWS:
|
||||||
|
const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA"
|
||||||
|
path = os.path.join(os.path.normpath(_get_win_folder(const)), appname)
|
||||||
|
elif sys.platform == "darwin":
|
||||||
|
path = os.path.join(
|
||||||
|
os.path.expanduser('~/Library/Application Support/'),
|
||||||
|
appname,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
path = os.path.join(
|
||||||
|
os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share")),
|
||||||
|
appname,
|
||||||
|
)
|
||||||
|
|
||||||
|
return path
|
||||||
|
|
||||||
|
|
||||||
|
def user_log_dir(appname):
|
||||||
|
"""
|
||||||
|
Return full path to the user-specific log dir for this application.
|
||||||
|
|
||||||
|
"appname" is the name of application.
|
||||||
|
If None, just the system directory is returned.
|
||||||
|
|
||||||
|
Typical user cache directories are:
|
||||||
|
Mac OS X: ~/Library/Logs/<AppName>
|
||||||
|
Unix: ~/.cache/<AppName>/log # or under $XDG_CACHE_HOME if
|
||||||
|
defined
|
||||||
|
Win XP: C:\Documents and Settings\<username>\Local Settings\ ...
|
||||||
|
...Application Data\<AppName>\Logs
|
||||||
|
Vista: C:\\Users\<username>\AppData\Local\<AppName>\Logs
|
||||||
|
|
||||||
|
On Windows the only suggestion in the MSDN docs is that local settings
|
||||||
|
go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in
|
||||||
|
examples of what some windows apps use for a logs dir.)
|
||||||
|
|
||||||
|
OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA`
|
||||||
|
value for Windows and appends "log" to the user cache dir for Unix.
|
||||||
|
"""
|
||||||
|
if WINDOWS:
|
||||||
|
path = os.path.join(user_data_dir(appname), "Logs")
|
||||||
|
elif sys.platform == "darwin":
|
||||||
|
path = os.path.join(os.path.expanduser('~/Library/Logs'), appname)
|
||||||
|
else:
|
||||||
|
path = os.path.join(user_cache_dir(appname), "log")
|
||||||
|
|
||||||
|
return path
|
||||||
|
|
||||||
|
|
||||||
# for the discussion regarding site_config_dirs locations
|
# for the discussion regarding site_config_dirs locations
|
||||||
# see <https://github.com/pypa/pip/issues/1733>
|
# see <https://github.com/pypa/pip/issues/1733>
|
||||||
def site_config_dirs(appname):
|
def site_config_dirs(appname):
|
||||||
|
|
|
@ -1,34 +1,40 @@
|
||||||
"""Base Command class, and related routines"""
|
"""Base Command class, and related routines"""
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
|
import logging
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
import tempfile
|
|
||||||
import traceback
|
import traceback
|
||||||
import time
|
|
||||||
import optparse
|
import optparse
|
||||||
|
import warnings
|
||||||
|
|
||||||
from pip import cmdoptions
|
from pip import appdirs, cmdoptions
|
||||||
from pip.locations import running_under_virtualenv
|
from pip.locations import running_under_virtualenv
|
||||||
from pip.log import logger
|
|
||||||
from pip.download import PipSession
|
from pip.download import PipSession
|
||||||
from pip.exceptions import (BadCommand, InstallationError, UninstallationError,
|
from pip.exceptions import (BadCommand, InstallationError, UninstallationError,
|
||||||
CommandError, PreviousBuildDirError)
|
CommandError, PreviousBuildDirError)
|
||||||
from pip.compat import StringIO, native_str
|
from pip.compat import StringIO, logging_dictConfig
|
||||||
from pip.baseparser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
|
from pip.baseparser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
|
||||||
from pip.status_codes import (
|
from pip.status_codes import (
|
||||||
SUCCESS, ERROR, UNKNOWN_ERROR, VIRTUALENV_NOT_FOUND,
|
SUCCESS, ERROR, UNKNOWN_ERROR, VIRTUALENV_NOT_FOUND,
|
||||||
PREVIOUS_BUILD_DIR_ERROR,
|
PREVIOUS_BUILD_DIR_ERROR,
|
||||||
)
|
)
|
||||||
from pip.util import get_prog, normalize_path
|
from pip.utils import get_prog, normalize_path
|
||||||
|
from pip.utils.deprecation import RemovedInPip18Warning
|
||||||
|
from pip.utils.logging import IndentingFormatter
|
||||||
|
|
||||||
|
|
||||||
__all__ = ['Command']
|
__all__ = ['Command']
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class Command(object):
|
class Command(object):
|
||||||
name = None
|
name = None
|
||||||
usage = None
|
usage = None
|
||||||
hidden = False
|
hidden = False
|
||||||
|
log_stream = "ext://sys.stdout"
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
parser_kw = {
|
parser_kw = {
|
||||||
|
@ -88,9 +94,6 @@ class Command(object):
|
||||||
|
|
||||||
return session
|
return session
|
||||||
|
|
||||||
def setup_logging(self):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def parse_args(self, args):
|
def parse_args(self, args):
|
||||||
# factored out for testability
|
# factored out for testability
|
||||||
return self.parser.parse_args(args)
|
return self.parser.parse_args(args)
|
||||||
|
@ -98,19 +101,84 @@ class Command(object):
|
||||||
def main(self, args):
|
def main(self, args):
|
||||||
options, args = self.parse_args(args)
|
options, args = self.parse_args(args)
|
||||||
|
|
||||||
level = 1 # Notify
|
if options.quiet:
|
||||||
level += options.verbose
|
level = "WARNING"
|
||||||
level -= options.quiet
|
elif options.verbose:
|
||||||
level = logger.level_for_integer(4 - level)
|
level = "DEBUG"
|
||||||
complete_log = []
|
else:
|
||||||
logger.add_consumers(
|
level = "INFO"
|
||||||
(level, sys.stdout),
|
|
||||||
(logger.DEBUG, complete_log.append),
|
|
||||||
)
|
|
||||||
if options.log_explicit_levels:
|
|
||||||
logger.explicit_levels = True
|
|
||||||
|
|
||||||
self.setup_logging()
|
logging_dictConfig({
|
||||||
|
"version": 1,
|
||||||
|
"disable_existing_loggers": False,
|
||||||
|
"formatters": {
|
||||||
|
"indent": {
|
||||||
|
"()": IndentingFormatter,
|
||||||
|
"format": (
|
||||||
|
"%(message)s"
|
||||||
|
if not options.log_explicit_levels
|
||||||
|
else "[%(levelname)s] %(message)s"
|
||||||
|
),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"handlers": {
|
||||||
|
"console": {
|
||||||
|
"level": level,
|
||||||
|
"class": "pip.utils.logging.ColorizedStreamHandler",
|
||||||
|
"stream": self.log_stream,
|
||||||
|
"formatter": "indent",
|
||||||
|
},
|
||||||
|
"debug_log": {
|
||||||
|
"level": "DEBUG",
|
||||||
|
"class": "pip.utils.logging.BetterRotatingFileHandler",
|
||||||
|
"filename": os.path.join(
|
||||||
|
appdirs.user_log_dir("pip"), "debug.log",
|
||||||
|
),
|
||||||
|
"maxBytes": 10 * 1000 * 1000, # 10 MB
|
||||||
|
"backupCount": 1,
|
||||||
|
"delay": True,
|
||||||
|
"formatter": "indent",
|
||||||
|
},
|
||||||
|
"user_log": {
|
||||||
|
"level": "DEBUG",
|
||||||
|
"class": "pip.utils.logging.BetterRotatingFileHandler",
|
||||||
|
"filename": options.log or "/dev/null",
|
||||||
|
"delay": True,
|
||||||
|
"formatter": "indent",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"root": {
|
||||||
|
"level": level,
|
||||||
|
"handlers": list(filter(None, [
|
||||||
|
"console",
|
||||||
|
"debug_log",
|
||||||
|
"user_log" if options.log else None,
|
||||||
|
])),
|
||||||
|
},
|
||||||
|
# Disable any logging besides WARNING unless we have DEBUG level
|
||||||
|
# logging enabled. These use both pip._vendor and the bare names
|
||||||
|
# for the case where someone unbundles our libraries.
|
||||||
|
"loggers": dict(
|
||||||
|
(
|
||||||
|
name,
|
||||||
|
{
|
||||||
|
"level": (
|
||||||
|
"WARNING"
|
||||||
|
if level in ["INFO", "ERROR"]
|
||||||
|
else "DEBUG"
|
||||||
|
),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
for name in ["pip._vendor", "distlib", "requests", "urllib3"]
|
||||||
|
),
|
||||||
|
})
|
||||||
|
|
||||||
|
if options.log_explicit_levels:
|
||||||
|
warnings.warn(
|
||||||
|
"--log-explicit-levels has been deprecated and will be removed"
|
||||||
|
" in a future version.",
|
||||||
|
RemovedInPip18Warning,
|
||||||
|
)
|
||||||
|
|
||||||
# TODO: try to get these passing down from the command?
|
# TODO: try to get these passing down from the command?
|
||||||
# without resorting to os.environ to hold these.
|
# without resorting to os.environ to hold these.
|
||||||
|
@ -124,68 +192,43 @@ class Command(object):
|
||||||
if options.require_venv:
|
if options.require_venv:
|
||||||
# If a venv is required check if it can really be found
|
# If a venv is required check if it can really be found
|
||||||
if not running_under_virtualenv():
|
if not running_under_virtualenv():
|
||||||
logger.fatal(
|
logger.critical(
|
||||||
'Could not find an activated virtualenv (required).'
|
'Could not find an activated virtualenv (required).'
|
||||||
)
|
)
|
||||||
sys.exit(VIRTUALENV_NOT_FOUND)
|
sys.exit(VIRTUALENV_NOT_FOUND)
|
||||||
|
|
||||||
if options.log:
|
|
||||||
log_fp = open_logfile(options.log, 'a')
|
|
||||||
logger.add_consumers((logger.DEBUG, log_fp))
|
|
||||||
else:
|
|
||||||
log_fp = None
|
|
||||||
|
|
||||||
exit = SUCCESS
|
|
||||||
store_log = False
|
|
||||||
try:
|
try:
|
||||||
status = self.run(options, args)
|
status = self.run(options, args)
|
||||||
# FIXME: all commands should return an exit status
|
# FIXME: all commands should return an exit status
|
||||||
# and when it is done, isinstance is not needed anymore
|
# and when it is done, isinstance is not needed anymore
|
||||||
if isinstance(status, int):
|
if isinstance(status, int):
|
||||||
exit = status
|
return status
|
||||||
except PreviousBuildDirError as exc:
|
except PreviousBuildDirError as exc:
|
||||||
logger.fatal(str(exc))
|
logger.critical(str(exc))
|
||||||
logger.info('Exception information:\n%s' % format_exc())
|
logger.debug('Exception information:\n%s', format_exc())
|
||||||
store_log = True
|
|
||||||
exit = PREVIOUS_BUILD_DIR_ERROR
|
return PREVIOUS_BUILD_DIR_ERROR
|
||||||
except (InstallationError, UninstallationError) as exc:
|
except (InstallationError, UninstallationError, BadCommand) as exc:
|
||||||
logger.fatal(str(exc))
|
logger.critical(str(exc))
|
||||||
logger.info('Exception information:\n%s' % format_exc())
|
logger.debug('Exception information:\n%s', format_exc())
|
||||||
store_log = True
|
|
||||||
exit = ERROR
|
return ERROR
|
||||||
except BadCommand as exc:
|
|
||||||
logger.fatal(str(exc))
|
|
||||||
logger.info('Exception information:\n%s' % format_exc())
|
|
||||||
store_log = True
|
|
||||||
exit = ERROR
|
|
||||||
except CommandError as exc:
|
except CommandError as exc:
|
||||||
logger.fatal('ERROR: %s' % exc)
|
logger.critical('ERROR: %s', exc)
|
||||||
logger.info('Exception information:\n%s' % format_exc())
|
logger.debug('Exception information:\n%s', format_exc())
|
||||||
exit = ERROR
|
|
||||||
|
return ERROR
|
||||||
except KeyboardInterrupt:
|
except KeyboardInterrupt:
|
||||||
logger.fatal('Operation cancelled by user')
|
logger.critical('Operation cancelled by user')
|
||||||
logger.info('Exception information:\n%s' % format_exc())
|
logger.debug('Exception information:\n%s', format_exc())
|
||||||
store_log = True
|
|
||||||
exit = ERROR
|
return ERROR
|
||||||
except:
|
except:
|
||||||
logger.fatal('Exception:\n%s' % format_exc())
|
logger.critical('Exception:\n%s', format_exc())
|
||||||
store_log = True
|
|
||||||
exit = UNKNOWN_ERROR
|
return UNKNOWN_ERROR
|
||||||
if store_log:
|
|
||||||
log_file_fn = options.log_file
|
return SUCCESS
|
||||||
text = '\n'.join(native_str(l, True) for l in complete_log)
|
|
||||||
try:
|
|
||||||
log_file_fp = open_logfile(log_file_fn, 'w')
|
|
||||||
except IOError:
|
|
||||||
temp = tempfile.NamedTemporaryFile(delete=False)
|
|
||||||
log_file_fn = temp.name
|
|
||||||
log_file_fp = open_logfile(log_file_fn, 'w')
|
|
||||||
logger.fatal('Storing debug log for failure in %s' % log_file_fn)
|
|
||||||
log_file_fp.write(text)
|
|
||||||
log_file_fp.close()
|
|
||||||
if log_fp is not None:
|
|
||||||
log_fp.close()
|
|
||||||
return exit
|
|
||||||
|
|
||||||
|
|
||||||
def format_exc(exc_info=None):
|
def format_exc(exc_info=None):
|
||||||
|
@ -194,27 +237,3 @@ def format_exc(exc_info=None):
|
||||||
out = StringIO()
|
out = StringIO()
|
||||||
traceback.print_exception(*exc_info, **dict(file=out))
|
traceback.print_exception(*exc_info, **dict(file=out))
|
||||||
return out.getvalue()
|
return out.getvalue()
|
||||||
|
|
||||||
|
|
||||||
def open_logfile(filename, mode='a'):
|
|
||||||
"""Open the named log file in append mode.
|
|
||||||
|
|
||||||
If the file already exists, a separator will also be printed to
|
|
||||||
the file to separate past activity from current activity.
|
|
||||||
"""
|
|
||||||
filename = os.path.expanduser(filename)
|
|
||||||
filename = os.path.abspath(filename)
|
|
||||||
dirname = os.path.dirname(filename)
|
|
||||||
if not os.path.exists(dirname):
|
|
||||||
os.makedirs(dirname)
|
|
||||||
exists = os.path.exists(filename)
|
|
||||||
|
|
||||||
kw = {}
|
|
||||||
if sys.version_info[0] != 2:
|
|
||||||
kw['errors'] = 'replace'
|
|
||||||
|
|
||||||
log_fp = open(filename, mode, **kw)
|
|
||||||
if exists:
|
|
||||||
log_fp.write('%s\n' % ('-' * 60))
|
|
||||||
log_fp.write('%s run on %s\n' % (sys.argv[0], time.strftime('%c')))
|
|
||||||
return log_fp
|
|
||||||
|
|
|
@ -1,4 +1,5 @@
|
||||||
"""Base option parser setup"""
|
"""Base option parser setup"""
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
import sys
|
import sys
|
||||||
import optparse
|
import optparse
|
||||||
|
@ -12,7 +13,7 @@ from pip.locations import (
|
||||||
default_config_file, default_config_basename, running_under_virtualenv,
|
default_config_file, default_config_basename, running_under_virtualenv,
|
||||||
site_config_files
|
site_config_files
|
||||||
)
|
)
|
||||||
from pip.util import get_terminal_size
|
from pip.utils import get_terminal_size
|
||||||
|
|
||||||
|
|
||||||
class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
|
class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
|
||||||
|
|
|
@ -7,10 +7,12 @@ between parses. pip parse's general options twice internally, and shouldn't
|
||||||
pass on state. To be consistent, all options will follow this design.
|
pass on state. To be consistent, all options will follow this design.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
import copy
|
import copy
|
||||||
from optparse import OptionGroup, SUPPRESS_HELP, Option
|
from optparse import OptionGroup, SUPPRESS_HELP, Option
|
||||||
from pip.locations import (
|
from pip.locations import (
|
||||||
CA_BUNDLE_PATH, USER_CACHE_DIR, build_prefix, default_log_file, src_prefix,
|
CA_BUNDLE_PATH, USER_CACHE_DIR, build_prefix, src_prefix,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@ -79,10 +81,11 @@ quiet = OptionMaker(
|
||||||
help='Give less output.')
|
help='Give less output.')
|
||||||
|
|
||||||
log = OptionMaker(
|
log = OptionMaker(
|
||||||
'--log',
|
"--log", "--log-file", "--local-log",
|
||||||
dest='log',
|
dest="log",
|
||||||
metavar='path',
|
metavar="path",
|
||||||
help='Path to a verbose appending log. This log is inactive by default.')
|
help="Path to a verbose appending log."
|
||||||
|
)
|
||||||
|
|
||||||
log_explicit_levels = OptionMaker(
|
log_explicit_levels = OptionMaker(
|
||||||
# Writes the log levels explicitely to the log'
|
# Writes the log levels explicitely to the log'
|
||||||
|
@ -92,15 +95,6 @@ log_explicit_levels = OptionMaker(
|
||||||
default=False,
|
default=False,
|
||||||
help=SUPPRESS_HELP)
|
help=SUPPRESS_HELP)
|
||||||
|
|
||||||
log_file = OptionMaker(
|
|
||||||
# The default log file
|
|
||||||
'--log-file', '--local-log',
|
|
||||||
dest='log_file',
|
|
||||||
metavar='path',
|
|
||||||
default=default_log_file,
|
|
||||||
help='Path to a verbose non-appending log, that only logs failures. This '
|
|
||||||
'log is active by default at %default.')
|
|
||||||
|
|
||||||
no_input = OptionMaker(
|
no_input = OptionMaker(
|
||||||
# Don't ask for input
|
# Don't ask for input
|
||||||
'--no-input',
|
'--no-input',
|
||||||
|
@ -409,7 +403,6 @@ general_group = {
|
||||||
verbose,
|
verbose,
|
||||||
version,
|
version,
|
||||||
quiet,
|
quiet,
|
||||||
log_file,
|
|
||||||
log,
|
log,
|
||||||
log_explicit_levels,
|
log_explicit_levels,
|
||||||
no_input,
|
no_input,
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
"""
|
"""
|
||||||
Package containing all pip commands
|
Package containing all pip commands
|
||||||
"""
|
"""
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
from pip.commands.completion import CompletionCommand
|
from pip.commands.completion import CompletionCommand
|
||||||
from pip.commands.freeze import FreezeCommand
|
from pip.commands.freeze import FreezeCommand
|
||||||
|
|
|
@ -1,3 +1,5 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
import sys
|
import sys
|
||||||
from pip.basecommand import Command
|
from pip.basecommand import Command
|
||||||
|
|
||||||
|
|
|
@ -1,18 +1,25 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
|
import logging
|
||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
|
|
||||||
import pip
|
import pip
|
||||||
|
|
||||||
from pip.compat import stdlib_pkgs
|
from pip.compat import stdlib_pkgs
|
||||||
from pip.req import InstallRequirement
|
from pip.req import InstallRequirement
|
||||||
from pip.log import logger
|
|
||||||
from pip.basecommand import Command
|
from pip.basecommand import Command
|
||||||
from pip.util import get_installed_distributions
|
from pip.utils import get_installed_distributions
|
||||||
from pip._vendor import pkg_resources
|
from pip._vendor import pkg_resources
|
||||||
|
|
||||||
# packages to exclude from freeze output
|
# packages to exclude from freeze output
|
||||||
freeze_excludes = stdlib_pkgs + ['setuptools', 'pip', 'distribute']
|
freeze_excludes = stdlib_pkgs + ['setuptools', 'pip', 'distribute']
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class FreezeCommand(Command):
|
class FreezeCommand(Command):
|
||||||
"""
|
"""
|
||||||
Output installed packages in requirements format.
|
Output installed packages in requirements format.
|
||||||
|
@ -23,6 +30,7 @@ class FreezeCommand(Command):
|
||||||
usage = """
|
usage = """
|
||||||
%prog [options]"""
|
%prog [options]"""
|
||||||
summary = 'Output installed packages in requirements format.'
|
summary = 'Output installed packages in requirements format.'
|
||||||
|
log_stream = "ext://sys.stderr"
|
||||||
|
|
||||||
def __init__(self, *args, **kw):
|
def __init__(self, *args, **kw):
|
||||||
super(FreezeCommand, self).__init__(*args, **kw)
|
super(FreezeCommand, self).__init__(*args, **kw)
|
||||||
|
@ -53,9 +61,6 @@ class FreezeCommand(Command):
|
||||||
|
|
||||||
self.parser.insert_option_group(0, self.cmd_opts)
|
self.parser.insert_option_group(0, self.cmd_opts)
|
||||||
|
|
||||||
def setup_logging(self):
|
|
||||||
logger.move_stdout_to_stderr()
|
|
||||||
|
|
||||||
def run(self, options, args):
|
def run(self, options, args):
|
||||||
requirement = options.requirement
|
requirement = options.requirement
|
||||||
find_links = options.find_links or []
|
find_links = options.find_links or []
|
||||||
|
@ -123,19 +128,21 @@ class FreezeCommand(Command):
|
||||||
else:
|
else:
|
||||||
line_req = InstallRequirement.from_line(line)
|
line_req = InstallRequirement.from_line(line)
|
||||||
if not line_req.name:
|
if not line_req.name:
|
||||||
logger.notify(
|
logger.info(
|
||||||
"Skipping line because it's not clear what it would "
|
"Skipping line because it's not clear what it would "
|
||||||
"install: %s" % line.strip()
|
"install: %s",
|
||||||
|
line.strip(),
|
||||||
)
|
)
|
||||||
logger.notify(
|
logger.info(
|
||||||
" (add #egg=PackageName to the URL to avoid"
|
" (add #egg=PackageName to the URL to avoid"
|
||||||
" this warning)"
|
" this warning)"
|
||||||
)
|
)
|
||||||
continue
|
continue
|
||||||
if line_req.name not in installations:
|
if line_req.name not in installations:
|
||||||
logger.warn(
|
logger.warning(
|
||||||
"Requirement file contains %s, but that package is not"
|
"Requirement file contains %s, but that package is not"
|
||||||
" installed" % line.strip()
|
" installed",
|
||||||
|
line.strip(),
|
||||||
)
|
)
|
||||||
continue
|
continue
|
||||||
f.write(str(installations[line_req.name]))
|
f.write(str(installations[line_req.name]))
|
||||||
|
|
|
@ -1,3 +1,5 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
from pip.basecommand import Command, SUCCESS
|
from pip.basecommand import Command, SUCCESS
|
||||||
from pip.exceptions import CommandError
|
from pip.exceptions import CommandError
|
||||||
|
|
||||||
|
|
|
@ -1,9 +1,12 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
|
import logging
|
||||||
import os
|
import os
|
||||||
import tempfile
|
import tempfile
|
||||||
import shutil
|
import shutil
|
||||||
|
import warnings
|
||||||
|
|
||||||
from pip.req import InstallRequirement, RequirementSet, parse_requirements
|
from pip.req import InstallRequirement, RequirementSet, parse_requirements
|
||||||
from pip.log import logger
|
|
||||||
from pip.locations import (virtualenv_no_global, distutils_scheme,
|
from pip.locations import (virtualenv_no_global, distutils_scheme,
|
||||||
build_prefix)
|
build_prefix)
|
||||||
from pip.basecommand import Command
|
from pip.basecommand import Command
|
||||||
|
@ -12,6 +15,10 @@ from pip.exceptions import (
|
||||||
InstallationError, CommandError, PreviousBuildDirError,
|
InstallationError, CommandError, PreviousBuildDirError,
|
||||||
)
|
)
|
||||||
from pip import cmdoptions
|
from pip import cmdoptions
|
||||||
|
from pip.utils.deprecation import RemovedInPip17Warning, RemovedInPip18Warning
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class InstallCommand(Command):
|
class InstallCommand(Command):
|
||||||
|
@ -192,11 +199,10 @@ class InstallCommand(Command):
|
||||||
(options.build_dir != build_prefix) or
|
(options.build_dir != build_prefix) or
|
||||||
options.no_clean
|
options.no_clean
|
||||||
):
|
):
|
||||||
logger.deprecated(
|
warnings.warn(
|
||||||
'1.7',
|
"--no-install, --no-download, --build, and --no-clean are "
|
||||||
'DEPRECATION: --no-install, --no-download, --build, '
|
"deprecated. See https://github.com/pypa/pip/issues/906.",
|
||||||
'and --no-clean are deprecated. See '
|
RemovedInPip17Warning,
|
||||||
'https://github.com/pypa/pip/issues/906.',
|
|
||||||
)
|
)
|
||||||
|
|
||||||
if options.download_dir:
|
if options.download_dir:
|
||||||
|
@ -229,32 +235,32 @@ class InstallCommand(Command):
|
||||||
global_options = options.global_options or []
|
global_options = options.global_options or []
|
||||||
index_urls = [options.index_url] + options.extra_index_urls
|
index_urls = [options.index_url] + options.extra_index_urls
|
||||||
if options.no_index:
|
if options.no_index:
|
||||||
logger.notify('Ignoring indexes: %s' % ','.join(index_urls))
|
logger.info('Ignoring indexes: %s', ','.join(index_urls))
|
||||||
index_urls = []
|
index_urls = []
|
||||||
|
|
||||||
if options.use_mirrors:
|
if options.use_mirrors:
|
||||||
logger.deprecated(
|
warnings.warn(
|
||||||
"1.7",
|
"--use-mirrors has been deprecated and will be removed in the "
|
||||||
"--use-mirrors has been deprecated and will be removed"
|
"future. Explicit uses of --index-url and/or --extra-index-url"
|
||||||
" in the future. Explicit uses of --index-url and/or "
|
" is suggested.",
|
||||||
"--extra-index-url is suggested."
|
RemovedInPip17Warning,
|
||||||
)
|
)
|
||||||
|
|
||||||
if options.mirrors:
|
if options.mirrors:
|
||||||
logger.deprecated(
|
warnings.warn(
|
||||||
"1.7",
|
"--mirrors has been deprecated and will be removed in the "
|
||||||
"--mirrors has been deprecated and will be removed in "
|
"future. Explicit uses of --index-url and/or --extra-index-url"
|
||||||
" the future. Explicit uses of --index-url and/or "
|
" is suggested.",
|
||||||
"--extra-index-url is suggested."
|
RemovedInPip17Warning,
|
||||||
)
|
)
|
||||||
index_urls += options.mirrors
|
index_urls += options.mirrors
|
||||||
|
|
||||||
if options.download_cache:
|
if options.download_cache:
|
||||||
logger.deprecated(
|
warnings.warn(
|
||||||
"1.8",
|
|
||||||
"--download-cache has been deprecated and will be removed in "
|
"--download-cache has been deprecated and will be removed in "
|
||||||
" the future. Pip now automatically uses and configures its "
|
"the future. Pip now automatically uses and configures its "
|
||||||
"cache."
|
"cache.",
|
||||||
|
RemovedInPip18Warning,
|
||||||
)
|
)
|
||||||
|
|
||||||
with self._build_session(options) as session:
|
with self._build_session(options) as session:
|
||||||
|
@ -299,7 +305,7 @@ class InstallCommand(Command):
|
||||||
else:
|
else:
|
||||||
msg = ('You must give at least one requirement '
|
msg = ('You must give at least one requirement '
|
||||||
'to %(name)s (see "pip help %(name)s")' % opts)
|
'to %(name)s (see "pip help %(name)s")' % opts)
|
||||||
logger.warn(msg)
|
logger.warning(msg)
|
||||||
return
|
return
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
@ -319,16 +325,14 @@ class InstallCommand(Command):
|
||||||
requirement_set.successfully_installed
|
requirement_set.successfully_installed
|
||||||
])
|
])
|
||||||
if installed:
|
if installed:
|
||||||
logger.notify('Successfully installed %s' % installed)
|
logger.info('Successfully installed %s', installed)
|
||||||
else:
|
else:
|
||||||
downloaded = ' '.join([
|
downloaded = ' '.join([
|
||||||
req.name
|
req.name
|
||||||
for req in requirement_set.successfully_downloaded
|
for req in requirement_set.successfully_downloaded
|
||||||
])
|
])
|
||||||
if downloaded:
|
if downloaded:
|
||||||
logger.notify(
|
logger.info('Successfully downloaded %s', downloaded)
|
||||||
'Successfully downloaded %s' % downloaded
|
|
||||||
)
|
|
||||||
except PreviousBuildDirError:
|
except PreviousBuildDirError:
|
||||||
options.no_clean = True
|
options.no_clean = True
|
||||||
raise
|
raise
|
||||||
|
|
|
@ -1,12 +1,20 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import warnings
|
||||||
|
|
||||||
from pip.basecommand import Command
|
from pip.basecommand import Command
|
||||||
from pip.exceptions import DistributionNotFound, BestVersionAlreadyInstalled
|
from pip.exceptions import DistributionNotFound, BestVersionAlreadyInstalled
|
||||||
from pip.index import PackageFinder
|
from pip.index import PackageFinder
|
||||||
from pip.log import logger
|
|
||||||
from pip.req import InstallRequirement
|
from pip.req import InstallRequirement
|
||||||
from pip.util import get_installed_distributions, dist_is_editable
|
from pip.utils import get_installed_distributions, dist_is_editable
|
||||||
|
from pip.utils.deprecation import RemovedInPip17Warning
|
||||||
from pip.cmdoptions import make_option_group, index_group
|
from pip.cmdoptions import make_option_group, index_group
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class ListCommand(Command):
|
class ListCommand(Command):
|
||||||
"""
|
"""
|
||||||
List installed packages, including editables.
|
List installed packages, including editables.
|
||||||
|
@ -88,31 +96,31 @@ class ListCommand(Command):
|
||||||
for dist, remote_version_raw, remote_version_parsed in \
|
for dist, remote_version_raw, remote_version_parsed in \
|
||||||
self.find_packages_latests_versions(options):
|
self.find_packages_latests_versions(options):
|
||||||
if remote_version_parsed > dist.parsed_version:
|
if remote_version_parsed > dist.parsed_version:
|
||||||
logger.notify(
|
logger.info(
|
||||||
'%s (Current: %s Latest: %s)' %
|
'%s (Current: %s Latest: %s)',
|
||||||
(dist.project_name, dist.version, remote_version_raw)
|
dist.project_name, dist.version, remote_version_raw,
|
||||||
)
|
)
|
||||||
|
|
||||||
def find_packages_latests_versions(self, options):
|
def find_packages_latests_versions(self, options):
|
||||||
index_urls = [options.index_url] + options.extra_index_urls
|
index_urls = [options.index_url] + options.extra_index_urls
|
||||||
if options.no_index:
|
if options.no_index:
|
||||||
logger.notify('Ignoring indexes: %s' % ','.join(index_urls))
|
logger.info('Ignoring indexes: %s', ','.join(index_urls))
|
||||||
index_urls = []
|
index_urls = []
|
||||||
|
|
||||||
if options.use_mirrors:
|
if options.use_mirrors:
|
||||||
logger.deprecated(
|
warnings.warn(
|
||||||
"1.7",
|
"--use-mirrors has been deprecated and will be removed in the "
|
||||||
"--use-mirrors has been deprecated and will be removed"
|
"future. Explicit uses of --index-url and/or --extra-index-url"
|
||||||
" in the future. Explicit uses of --index-url and/or "
|
" is suggested.",
|
||||||
"--extra-index-url is suggested."
|
RemovedInPip17Warning,
|
||||||
)
|
)
|
||||||
|
|
||||||
if options.mirrors:
|
if options.mirrors:
|
||||||
logger.deprecated(
|
warnings.warn(
|
||||||
"1.7",
|
"--mirrors has been deprecated and will be removed in the "
|
||||||
"--mirrors has been deprecated and will be removed in "
|
"future. Explicit uses of --index-url and/or --extra-index-url"
|
||||||
" the future. Explicit uses of --index-url and/or "
|
" is suggested.",
|
||||||
"--extra-index-url is suggested."
|
RemovedInPip17Warning,
|
||||||
)
|
)
|
||||||
index_urls += options.mirrors
|
index_urls += options.mirrors
|
||||||
|
|
||||||
|
@ -181,7 +189,7 @@ class ListCommand(Command):
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
line = '%s (%s)' % (dist.project_name, dist.version)
|
line = '%s (%s)' % (dist.project_name, dist.version)
|
||||||
logger.notify(line)
|
logger.info(line)
|
||||||
|
|
||||||
def run_uptodate(self, options):
|
def run_uptodate(self, options):
|
||||||
uptodate = []
|
uptodate = []
|
||||||
|
|
|
@ -1,3 +1,6 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
|
import logging
|
||||||
import sys
|
import sys
|
||||||
import textwrap
|
import textwrap
|
||||||
|
|
||||||
|
@ -6,8 +9,8 @@ from functools import reduce
|
||||||
|
|
||||||
from pip.basecommand import Command, SUCCESS
|
from pip.basecommand import Command, SUCCESS
|
||||||
from pip.download import PipXmlrpcTransport
|
from pip.download import PipXmlrpcTransport
|
||||||
from pip.util import get_terminal_size
|
from pip.utils import get_terminal_size
|
||||||
from pip.log import logger
|
from pip.utils.logging import indent_log
|
||||||
from pip.compat import cmp
|
from pip.compat import cmp
|
||||||
from pip.exceptions import CommandError
|
from pip.exceptions import CommandError
|
||||||
from pip.status_codes import NO_MATCHES_FOUND
|
from pip.status_codes import NO_MATCHES_FOUND
|
||||||
|
@ -15,6 +18,9 @@ from pip._vendor import pkg_resources
|
||||||
from pip._vendor.six.moves import xmlrpc_client
|
from pip._vendor.six.moves import xmlrpc_client
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class SearchCommand(Command):
|
class SearchCommand(Command):
|
||||||
"""Search for PyPI packages whose name or summary contains <query>."""
|
"""Search for PyPI packages whose name or summary contains <query>."""
|
||||||
name = 'search'
|
name = 'search'
|
||||||
|
@ -112,19 +118,16 @@ def print_results(hits, name_column_width=25, terminal_width=None):
|
||||||
summary = ('\n' + ' ' * (name_column_width + 3)).join(summary)
|
summary = ('\n' + ' ' * (name_column_width + 3)).join(summary)
|
||||||
line = '%s - %s' % (name.ljust(name_column_width), summary)
|
line = '%s - %s' % (name.ljust(name_column_width), summary)
|
||||||
try:
|
try:
|
||||||
logger.notify(line)
|
logger.info(line)
|
||||||
if name in installed_packages:
|
if name in installed_packages:
|
||||||
dist = pkg_resources.get_distribution(name)
|
dist = pkg_resources.get_distribution(name)
|
||||||
logger.indent += 2
|
with indent_log():
|
||||||
try:
|
|
||||||
latest = highest_version(hit['versions'])
|
latest = highest_version(hit['versions'])
|
||||||
if dist.version == latest:
|
if dist.version == latest:
|
||||||
logger.notify('INSTALLED: %s (latest)' % dist.version)
|
logger.info('INSTALLED: %s (latest)', dist.version)
|
||||||
else:
|
else:
|
||||||
logger.notify('INSTALLED: %s' % dist.version)
|
logger.info('INSTALLED: %s', dist.version)
|
||||||
logger.notify('LATEST: %s' % latest)
|
logger.info('LATEST: %s', latest)
|
||||||
finally:
|
|
||||||
logger.indent -= 2
|
|
||||||
except UnicodeEncodeError:
|
except UnicodeEncodeError:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
|
@ -1,10 +1,15 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
|
import logging
|
||||||
import os
|
import os
|
||||||
|
|
||||||
from pip.basecommand import Command
|
from pip.basecommand import Command
|
||||||
from pip.log import logger
|
|
||||||
from pip._vendor import pkg_resources
|
from pip._vendor import pkg_resources
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class ShowCommand(Command):
|
class ShowCommand(Command):
|
||||||
"""Show information about one or more installed packages."""
|
"""Show information about one or more installed packages."""
|
||||||
name = 'show'
|
name = 'show'
|
||||||
|
@ -25,7 +30,7 @@ class ShowCommand(Command):
|
||||||
|
|
||||||
def run(self, options, args):
|
def run(self, options, args):
|
||||||
if not args:
|
if not args:
|
||||||
logger.warn('ERROR: Please provide a package name or names.')
|
logger.warning('ERROR: Please provide a package name or names.')
|
||||||
return
|
return
|
||||||
query = args
|
query = args
|
||||||
|
|
||||||
|
@ -75,15 +80,15 @@ def print_results(distributions, list_all_files):
|
||||||
Print the informations from installed distributions found.
|
Print the informations from installed distributions found.
|
||||||
"""
|
"""
|
||||||
for dist in distributions:
|
for dist in distributions:
|
||||||
logger.notify("---")
|
logger.info("---")
|
||||||
logger.notify("Name: %s" % dist['name'])
|
logger.info("Name: %s" % dist['name'])
|
||||||
logger.notify("Version: %s" % dist['version'])
|
logger.info("Version: %s" % dist['version'])
|
||||||
logger.notify("Location: %s" % dist['location'])
|
logger.info("Location: %s" % dist['location'])
|
||||||
logger.notify("Requires: %s" % ', '.join(dist['requires']))
|
logger.info("Requires: %s" % ', '.join(dist['requires']))
|
||||||
if list_all_files:
|
if list_all_files:
|
||||||
logger.notify("Files:")
|
logger.info("Files:")
|
||||||
if dist['files'] is not None:
|
if dist['files'] is not None:
|
||||||
for line in dist['files']:
|
for line in dist['files']:
|
||||||
logger.notify(" %s" % line.strip())
|
logger.info(" %s" % line.strip())
|
||||||
else:
|
else:
|
||||||
logger.notify("Cannot locate installed-files.txt")
|
logger.info("Cannot locate installed-files.txt")
|
||||||
|
|
|
@ -1,3 +1,5 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
from pip.req import InstallRequirement, RequirementSet, parse_requirements
|
from pip.req import InstallRequirement, RequirementSet, parse_requirements
|
||||||
from pip.basecommand import Command
|
from pip.basecommand import Command
|
||||||
from pip.exceptions import InstallationError
|
from pip.exceptions import InstallationError
|
||||||
|
|
|
@ -1,3 +1,5 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
from pip.commands.zip import ZipCommand
|
from pip.commands.zip import ZipCommand
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -1,20 +1,25 @@
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
from __future__ import absolute_import
|
from __future__ import absolute_import
|
||||||
|
|
||||||
|
import logging
|
||||||
import os
|
import os
|
||||||
|
import warnings
|
||||||
|
|
||||||
from pip.basecommand import Command
|
from pip.basecommand import Command
|
||||||
from pip.index import PackageFinder
|
from pip.index import PackageFinder
|
||||||
from pip.log import logger
|
|
||||||
from pip.exceptions import CommandError, PreviousBuildDirError
|
from pip.exceptions import CommandError, PreviousBuildDirError
|
||||||
from pip.req import InstallRequirement, RequirementSet, parse_requirements
|
from pip.req import InstallRequirement, RequirementSet, parse_requirements
|
||||||
from pip.util import normalize_path
|
from pip.utils import normalize_path
|
||||||
|
from pip.utils.deprecation import RemovedInPip17Warning, RemovedInPip18Warning
|
||||||
from pip.wheel import WheelBuilder
|
from pip.wheel import WheelBuilder
|
||||||
from pip import cmdoptions
|
from pip import cmdoptions
|
||||||
|
|
||||||
DEFAULT_WHEEL_DIR = os.path.join(normalize_path(os.curdir), 'wheelhouse')
|
DEFAULT_WHEEL_DIR = os.path.join(normalize_path(os.curdir), 'wheelhouse')
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class WheelCommand(Command):
|
class WheelCommand(Command):
|
||||||
"""
|
"""
|
||||||
Build Wheel archives for your requirements and dependencies.
|
Build Wheel archives for your requirements and dependencies.
|
||||||
|
@ -124,32 +129,32 @@ class WheelCommand(Command):
|
||||||
|
|
||||||
index_urls = [options.index_url] + options.extra_index_urls
|
index_urls = [options.index_url] + options.extra_index_urls
|
||||||
if options.no_index:
|
if options.no_index:
|
||||||
logger.notify('Ignoring indexes: %s' % ','.join(index_urls))
|
logger.info('Ignoring indexes: %s', ','.join(index_urls))
|
||||||
index_urls = []
|
index_urls = []
|
||||||
|
|
||||||
if options.use_mirrors:
|
if options.use_mirrors:
|
||||||
logger.deprecated(
|
warnings.warn(
|
||||||
"1.7",
|
"--use-mirrors has been deprecated and will be removed in the "
|
||||||
"--use-mirrors has been deprecated and will be removed"
|
"future. Explicit uses of --index-url and/or --extra-index-url"
|
||||||
" in the future. Explicit uses of --index-url and/or "
|
" is suggested.",
|
||||||
"--extra-index-url is suggested."
|
RemovedInPip17Warning,
|
||||||
)
|
)
|
||||||
|
|
||||||
if options.mirrors:
|
if options.mirrors:
|
||||||
logger.deprecated(
|
warnings.warn(
|
||||||
"1.7",
|
"--mirrors has been deprecated and will be removed in the "
|
||||||
"--mirrors has been deprecated and will be removed in "
|
"future. Explicit uses of --index-url and/or --extra-index-url"
|
||||||
" the future. Explicit uses of --index-url and/or "
|
" is suggested.",
|
||||||
"--extra-index-url is suggested."
|
RemovedInPip17Warning,
|
||||||
)
|
)
|
||||||
index_urls += options.mirrors
|
index_urls += options.mirrors
|
||||||
|
|
||||||
if options.download_cache:
|
if options.download_cache:
|
||||||
logger.deprecated(
|
warnings.warn(
|
||||||
"1.8",
|
|
||||||
"--download-cache has been deprecated and will be removed in "
|
"--download-cache has been deprecated and will be removed in "
|
||||||
" the future. Pip now automatically uses and configures its "
|
"the future. Pip now automatically uses and configures its "
|
||||||
"cache."
|
"cache.",
|
||||||
|
RemovedInPip18Warning,
|
||||||
)
|
)
|
||||||
|
|
||||||
with self._build_session(options) as session:
|
with self._build_session(options) as session:
|
||||||
|
@ -202,10 +207,11 @@ class WheelCommand(Command):
|
||||||
|
|
||||||
# fail if no requirements
|
# fail if no requirements
|
||||||
if not requirement_set.has_requirements:
|
if not requirement_set.has_requirements:
|
||||||
opts = {'name': self.name}
|
logger.error(
|
||||||
msg = ('You must give at least one requirement '
|
"You must give at least one requirement to %s "
|
||||||
'to %(name)s (see "pip help %(name)s")' % opts)
|
"(see \"pip help %s\")",
|
||||||
logger.error(msg)
|
self.name,
|
||||||
|
)
|
||||||
return
|
return
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
|
|
@ -1,15 +1,24 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
import sys
|
import sys
|
||||||
import re
|
import re
|
||||||
import fnmatch
|
import fnmatch
|
||||||
|
import logging
|
||||||
import os
|
import os
|
||||||
import shutil
|
import shutil
|
||||||
|
import warnings
|
||||||
import zipfile
|
import zipfile
|
||||||
from pip.util import display_path, backup_dir, rmtree
|
|
||||||
from pip.log import logger
|
from pip.utils import display_path, backup_dir, rmtree
|
||||||
|
from pip.utils.deprecation import RemovedInPip17Warning
|
||||||
|
from pip.utils.logging import indent_log
|
||||||
from pip.exceptions import InstallationError
|
from pip.exceptions import InstallationError
|
||||||
from pip.basecommand import Command
|
from pip.basecommand import Command
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class ZipCommand(Command):
|
class ZipCommand(Command):
|
||||||
"""Zip individual packages."""
|
"""Zip individual packages."""
|
||||||
name = 'zip'
|
name = 'zip'
|
||||||
|
@ -87,21 +96,27 @@ class ZipCommand(Command):
|
||||||
match_any.add(match)
|
match_any.add(match)
|
||||||
break
|
break
|
||||||
else:
|
else:
|
||||||
logger.debug("Skipping path %s because it doesn't match %s"
|
logger.debug(
|
||||||
% (path, ', '.join(self.select_paths)))
|
"Skipping path %s because it doesn't match %s",
|
||||||
|
path,
|
||||||
|
', '.join(self.select_paths),
|
||||||
|
)
|
||||||
for match in self.select_paths:
|
for match in self.select_paths:
|
||||||
if match not in match_any and '*' not in match:
|
if match not in match_any and '*' not in match:
|
||||||
result.append(match)
|
result.append(match)
|
||||||
logger.debug("Adding path %s because it doesn't match "
|
logger.debug(
|
||||||
"anything already on sys.path" % match)
|
"Adding path %s because it doesn't match "
|
||||||
|
"anything already on sys.path",
|
||||||
|
match,
|
||||||
|
)
|
||||||
return result
|
return result
|
||||||
|
|
||||||
def run(self, options, args):
|
def run(self, options, args):
|
||||||
|
|
||||||
logger.deprecated(
|
warnings.warn(
|
||||||
'1.7',
|
"'pip zip' and 'pip unzip` are deprecated, and will be removed in "
|
||||||
"DEPRECATION: 'pip zip' and 'pip unzip` are deprecated, and will "
|
"a future release.",
|
||||||
"be removed in a future release."
|
RemovedInPip17Warning,
|
||||||
)
|
)
|
||||||
|
|
||||||
self.select_paths = options.paths
|
self.select_paths = options.paths
|
||||||
|
@ -144,20 +159,22 @@ class ZipCommand(Command):
|
||||||
% (module_name, filename, zip_filename))
|
% (module_name, filename, zip_filename))
|
||||||
package_path = os.path.dirname(zip_filename)
|
package_path = os.path.dirname(zip_filename)
|
||||||
if package_path not in self.paths():
|
if package_path not in self.paths():
|
||||||
logger.warn(
|
logger.warning(
|
||||||
'Unpacking %s into %s, but %s is not on sys.path'
|
'Unpacking %s into %s, but %s is not on sys.path',
|
||||||
% (display_path(zip_filename), display_path(package_path),
|
display_path(zip_filename),
|
||||||
display_path(package_path)))
|
display_path(package_path),
|
||||||
logger.notify(
|
display_path(package_path),
|
||||||
'Unzipping %s (in %s)' % (module_name, display_path(zip_filename))
|
)
|
||||||
|
logger.info(
|
||||||
|
'Unzipping %s (in %s)', module_name, display_path(zip_filename),
|
||||||
)
|
)
|
||||||
if self.simulate:
|
if self.simulate:
|
||||||
logger.notify(
|
logger.info(
|
||||||
'Skipping remaining operations because of --simulate'
|
'Skipping remaining operations because of --simulate'
|
||||||
)
|
)
|
||||||
return
|
return
|
||||||
logger.indent += 2
|
|
||||||
try:
|
with indent_log():
|
||||||
# FIXME: this should be undoable:
|
# FIXME: this should be undoable:
|
||||||
zip = zipfile.ZipFile(zip_filename)
|
zip = zipfile.ZipFile(zip_filename)
|
||||||
to_save = []
|
to_save = []
|
||||||
|
@ -179,44 +196,44 @@ class ZipCommand(Command):
|
||||||
to_save.append((name, zip.read(name)))
|
to_save.append((name, zip.read(name)))
|
||||||
zip.close()
|
zip.close()
|
||||||
if not to_save:
|
if not to_save:
|
||||||
logger.info(
|
logger.debug(
|
||||||
'Removing now-empty zip file %s' %
|
'Removing now-empty zip file %s',
|
||||||
display_path(zip_filename)
|
display_path(zip_filename)
|
||||||
)
|
)
|
||||||
os.unlink(zip_filename)
|
os.unlink(zip_filename)
|
||||||
self.remove_filename_from_pth(zip_filename)
|
self.remove_filename_from_pth(zip_filename)
|
||||||
else:
|
else:
|
||||||
logger.info(
|
logger.debug(
|
||||||
'Removing entries in %s/ from zip file %s' %
|
'Removing entries in %s/ from zip file %s',
|
||||||
(module_name, display_path(zip_filename))
|
module_name,
|
||||||
|
display_path(zip_filename),
|
||||||
)
|
)
|
||||||
zip = zipfile.ZipFile(zip_filename, 'w')
|
zip = zipfile.ZipFile(zip_filename, 'w')
|
||||||
for name, content in to_save:
|
for name, content in to_save:
|
||||||
zip.writestr(name, content)
|
zip.writestr(name, content)
|
||||||
zip.close()
|
zip.close()
|
||||||
finally:
|
|
||||||
logger.indent -= 2
|
|
||||||
|
|
||||||
def zip_package(self, module_name, filename, no_pyc):
|
def zip_package(self, module_name, filename, no_pyc):
|
||||||
|
logger.info('Zip %s (in %s)', module_name, display_path(filename))
|
||||||
|
|
||||||
orig_filename = filename
|
orig_filename = filename
|
||||||
logger.notify('Zip %s (in %s)' % (module_name, display_path(filename)))
|
|
||||||
logger.indent += 2
|
|
||||||
if filename.endswith('.egg'):
|
if filename.endswith('.egg'):
|
||||||
dest_filename = filename
|
dest_filename = filename
|
||||||
else:
|
else:
|
||||||
dest_filename = filename + '.zip'
|
dest_filename = filename + '.zip'
|
||||||
try:
|
|
||||||
|
with indent_log():
|
||||||
# FIXME: I think this needs to be undoable:
|
# FIXME: I think this needs to be undoable:
|
||||||
if filename == dest_filename:
|
if filename == dest_filename:
|
||||||
filename = backup_dir(orig_filename)
|
filename = backup_dir(orig_filename)
|
||||||
logger.notify(
|
logger.info(
|
||||||
'Moving %s aside to %s' % (orig_filename, filename)
|
'Moving %s aside to %s', orig_filename, filename,
|
||||||
)
|
)
|
||||||
if not self.simulate:
|
if not self.simulate:
|
||||||
shutil.move(orig_filename, filename)
|
shutil.move(orig_filename, filename)
|
||||||
try:
|
try:
|
||||||
logger.info(
|
logger.debug(
|
||||||
'Creating zip file in %s' % display_path(dest_filename)
|
'Creating zip file in %s', display_path(dest_filename),
|
||||||
)
|
)
|
||||||
if not self.simulate:
|
if not self.simulate:
|
||||||
zip = zipfile.ZipFile(dest_filename, 'w')
|
zip = zipfile.ZipFile(dest_filename, 'w')
|
||||||
|
@ -241,8 +258,8 @@ class ZipCommand(Command):
|
||||||
else:
|
else:
|
||||||
zip.write(full, dest)
|
zip.write(full, dest)
|
||||||
zip.close()
|
zip.close()
|
||||||
logger.info(
|
logger.debug(
|
||||||
'Removing old directory %s' % display_path(filename)
|
'Removing old directory %s', display_path(filename),
|
||||||
)
|
)
|
||||||
if not self.simulate:
|
if not self.simulate:
|
||||||
rmtree(filename)
|
rmtree(filename)
|
||||||
|
@ -251,8 +268,6 @@ class ZipCommand(Command):
|
||||||
raise
|
raise
|
||||||
# FIXME: should also be undone:
|
# FIXME: should also be undone:
|
||||||
self.add_filename_to_pth(dest_filename)
|
self.add_filename_to_pth(dest_filename)
|
||||||
finally:
|
|
||||||
logger.indent -= 2
|
|
||||||
|
|
||||||
def remove_filename_from_pth(self, filename):
|
def remove_filename_from_pth(self, filename):
|
||||||
for pth in self.pth_files():
|
for pth in self.pth_files():
|
||||||
|
@ -262,11 +277,14 @@ class ZipCommand(Command):
|
||||||
new_lines = [
|
new_lines = [
|
||||||
l for l in lines if l.strip() != filename]
|
l for l in lines if l.strip() != filename]
|
||||||
if lines != new_lines:
|
if lines != new_lines:
|
||||||
logger.info('Removing reference to %s from .pth file %s'
|
logger.debug(
|
||||||
% (display_path(filename), display_path(pth)))
|
'Removing reference to %s from .pth file %s',
|
||||||
|
display_path(filename),
|
||||||
|
display_path(pth),
|
||||||
|
)
|
||||||
if not [line for line in new_lines if line]:
|
if not [line for line in new_lines if line]:
|
||||||
logger.info(
|
logger.debug(
|
||||||
'%s file would be empty: deleting' % display_path(pth)
|
'%s file would be empty: deleting', display_path(pth)
|
||||||
)
|
)
|
||||||
if not self.simulate:
|
if not self.simulate:
|
||||||
os.unlink(pth)
|
os.unlink(pth)
|
||||||
|
@ -276,18 +294,18 @@ class ZipCommand(Command):
|
||||||
f.writelines(new_lines)
|
f.writelines(new_lines)
|
||||||
f.close()
|
f.close()
|
||||||
return
|
return
|
||||||
logger.warn(
|
logger.warning(
|
||||||
'Cannot find a reference to %s in any .pth file' %
|
'Cannot find a reference to %s in any .pth file',
|
||||||
display_path(filename)
|
display_path(filename),
|
||||||
)
|
)
|
||||||
|
|
||||||
def add_filename_to_pth(self, filename):
|
def add_filename_to_pth(self, filename):
|
||||||
path = os.path.dirname(filename)
|
path = os.path.dirname(filename)
|
||||||
dest = filename + '.pth'
|
dest = filename + '.pth'
|
||||||
if path not in self.paths():
|
if path not in self.paths():
|
||||||
logger.warn(
|
logger.warning(
|
||||||
'Adding .pth file %s, but it is not on sys.path' %
|
'Adding .pth file %s, but it is not on sys.path',
|
||||||
display_path(dest)
|
display_path(dest),
|
||||||
)
|
)
|
||||||
if not self.simulate:
|
if not self.simulate:
|
||||||
if os.path.exists(dest):
|
if os.path.exists(dest):
|
||||||
|
@ -340,24 +358,25 @@ class ZipCommand(Command):
|
||||||
basename = os.path.basename(path.rstrip(os.path.sep))
|
basename = os.path.basename(path.rstrip(os.path.sep))
|
||||||
if os.path.isfile(path) and zipfile.is_zipfile(path):
|
if os.path.isfile(path) and zipfile.is_zipfile(path):
|
||||||
if os.path.dirname(path) not in self.paths():
|
if os.path.dirname(path) not in self.paths():
|
||||||
logger.notify('Zipped egg: %s' % display_path(path))
|
logger.info('Zipped egg: %s', display_path(path))
|
||||||
continue
|
continue
|
||||||
if (basename != 'site-packages'
|
if (basename != 'site-packages'
|
||||||
and basename != 'dist-packages'
|
and basename != 'dist-packages'
|
||||||
and not path.replace('\\', '/').endswith('lib/python')):
|
and not path.replace('\\', '/').endswith('lib/python')):
|
||||||
continue
|
continue
|
||||||
logger.notify('In %s:' % display_path(path))
|
logger.info('In %s:', display_path(path))
|
||||||
logger.indent += 2
|
|
||||||
zipped = []
|
with indent_log():
|
||||||
unzipped = []
|
zipped = []
|
||||||
try:
|
unzipped = []
|
||||||
|
|
||||||
for filename in sorted(os.listdir(path)):
|
for filename in sorted(os.listdir(path)):
|
||||||
ext = os.path.splitext(filename)[1].lower()
|
ext = os.path.splitext(filename)[1].lower()
|
||||||
if ext in ('.pth', '.egg-info', '.egg-link'):
|
if ext in ('.pth', '.egg-info', '.egg-link'):
|
||||||
continue
|
continue
|
||||||
if ext == '.py':
|
if ext == '.py':
|
||||||
logger.info(
|
logger.debug(
|
||||||
'Not displaying %s: not a package' %
|
'Not displaying %s: not a package',
|
||||||
display_path(filename)
|
display_path(filename)
|
||||||
)
|
)
|
||||||
continue
|
continue
|
||||||
|
@ -367,33 +386,25 @@ class ZipCommand(Command):
|
||||||
elif zipfile.is_zipfile(full):
|
elif zipfile.is_zipfile(full):
|
||||||
zipped.append(filename)
|
zipped.append(filename)
|
||||||
else:
|
else:
|
||||||
logger.info(
|
logger.debug(
|
||||||
'Unknown file: %s' % display_path(filename)
|
'Unknown file: %s', display_path(filename),
|
||||||
)
|
)
|
||||||
if zipped:
|
if zipped:
|
||||||
logger.notify('Zipped packages:')
|
logger.info('Zipped packages:')
|
||||||
logger.indent += 2
|
with indent_log():
|
||||||
try:
|
|
||||||
for filename in zipped:
|
for filename in zipped:
|
||||||
logger.notify(filename)
|
logger.info(filename)
|
||||||
finally:
|
|
||||||
logger.indent -= 2
|
|
||||||
else:
|
else:
|
||||||
logger.notify('No zipped packages.')
|
logger.info('No zipped packages.')
|
||||||
if unzipped:
|
if unzipped:
|
||||||
if options.sort_files:
|
if options.sort_files:
|
||||||
unzipped.sort(key=lambda x: -x[1])
|
unzipped.sort(key=lambda x: -x[1])
|
||||||
logger.notify('Unzipped packages:')
|
logger.info('Unzipped packages:')
|
||||||
logger.indent += 2
|
with indent_log():
|
||||||
try:
|
|
||||||
for filename, count in unzipped:
|
for filename, count in unzipped:
|
||||||
logger.notify('%s (%i files)' % (filename, count))
|
logger.info('%s (%i files)', filename, count)
|
||||||
finally:
|
|
||||||
logger.indent -= 2
|
|
||||||
else:
|
else:
|
||||||
logger.notify('No unzipped packages.')
|
logger.info('No unzipped packages.')
|
||||||
finally:
|
|
||||||
logger.indent -= 2
|
|
||||||
|
|
||||||
def count_package(self, path):
|
def count_package(self, path):
|
||||||
total = 0
|
total = 0
|
||||||
|
|
|
@ -1,5 +1,6 @@
|
||||||
"""Stuff that differs in different Python versions and platform
|
"""Stuff that differs in different Python versions and platform
|
||||||
distributions."""
|
distributions."""
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
# flake8: noqa
|
# flake8: noqa
|
||||||
|
|
||||||
|
@ -30,6 +31,12 @@ except NameError:
|
||||||
console_encoding = sys.__stdout__.encoding
|
console_encoding = sys.__stdout__.encoding
|
||||||
|
|
||||||
|
|
||||||
|
try:
|
||||||
|
from logging.config import dictConfig as logging_dictConfig
|
||||||
|
except ImportError:
|
||||||
|
from pip.compat.dictconfig import dictConfig as logging_dictConfig
|
||||||
|
|
||||||
|
|
||||||
if sys.version_info >= (3,):
|
if sys.version_info >= (3,):
|
||||||
from io import StringIO
|
from io import StringIO
|
||||||
from urllib.error import URLError, HTTPError
|
from urllib.error import URLError, HTTPError
|
||||||
|
|
565
pip/compat/dictconfig.py
Normal file
565
pip/compat/dictconfig.py
Normal file
|
@ -0,0 +1,565 @@
|
||||||
|
# This is a copy of the Python logging.config.dictconfig module,
|
||||||
|
# reproduced with permission. It is provided here for backwards
|
||||||
|
# compatibility for Python versions prior to 2.7.
|
||||||
|
#
|
||||||
|
# Copyright 2009-2010 by Vinay Sajip. All Rights Reserved.
|
||||||
|
#
|
||||||
|
# Permission to use, copy, modify, and distribute this software and its
|
||||||
|
# documentation for any purpose and without fee is hereby granted,
|
||||||
|
# provided that the above copyright notice appear in all copies and that
|
||||||
|
# both that copyright notice and this permission notice appear in
|
||||||
|
# supporting documentation, and that the name of Vinay Sajip
|
||||||
|
# not be used in advertising or publicity pertaining to distribution
|
||||||
|
# of the software without specific, written prior permission.
|
||||||
|
# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
|
||||||
|
# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
|
||||||
|
# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
|
||||||
|
# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
|
||||||
|
# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||||
|
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
|
import logging.handlers
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
import types
|
||||||
|
|
||||||
|
from pip._vendor import six
|
||||||
|
|
||||||
|
# flake8: noqa
|
||||||
|
|
||||||
|
IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I)
|
||||||
|
|
||||||
|
|
||||||
|
def valid_ident(s):
|
||||||
|
m = IDENTIFIER.match(s)
|
||||||
|
if not m:
|
||||||
|
raise ValueError('Not a valid Python identifier: %r' % s)
|
||||||
|
return True
|
||||||
|
|
||||||
|
#
|
||||||
|
# This function is defined in logging only in recent versions of Python
|
||||||
|
#
|
||||||
|
try:
|
||||||
|
from logging import _checkLevel
|
||||||
|
except ImportError:
|
||||||
|
def _checkLevel(level):
|
||||||
|
if isinstance(level, int):
|
||||||
|
rv = level
|
||||||
|
elif str(level) == level:
|
||||||
|
if level not in logging._levelNames:
|
||||||
|
raise ValueError('Unknown level: %r' % level)
|
||||||
|
rv = logging._levelNames[level]
|
||||||
|
else:
|
||||||
|
raise TypeError('Level not an integer or a '
|
||||||
|
'valid string: %r' % level)
|
||||||
|
return rv
|
||||||
|
|
||||||
|
# The ConvertingXXX classes are wrappers around standard Python containers,
|
||||||
|
# and they serve to convert any suitable values in the container. The
|
||||||
|
# conversion converts base dicts, lists and tuples to their wrapped
|
||||||
|
# equivalents, whereas strings which match a conversion format are converted
|
||||||
|
# appropriately.
|
||||||
|
#
|
||||||
|
# Each wrapper should have a configurator attribute holding the actual
|
||||||
|
# configurator to use for conversion.
|
||||||
|
|
||||||
|
|
||||||
|
class ConvertingDict(dict):
|
||||||
|
"""A converting dictionary wrapper."""
|
||||||
|
|
||||||
|
def __getitem__(self, key):
|
||||||
|
value = dict.__getitem__(self, key)
|
||||||
|
result = self.configurator.convert(value)
|
||||||
|
# If the converted value is different, save for next time
|
||||||
|
if value is not result:
|
||||||
|
self[key] = result
|
||||||
|
if type(result) in (ConvertingDict, ConvertingList,
|
||||||
|
ConvertingTuple):
|
||||||
|
result.parent = self
|
||||||
|
result.key = key
|
||||||
|
return result
|
||||||
|
|
||||||
|
def get(self, key, default=None):
|
||||||
|
value = dict.get(self, key, default)
|
||||||
|
result = self.configurator.convert(value)
|
||||||
|
# If the converted value is different, save for next time
|
||||||
|
if value is not result:
|
||||||
|
self[key] = result
|
||||||
|
if type(result) in (ConvertingDict, ConvertingList,
|
||||||
|
ConvertingTuple):
|
||||||
|
result.parent = self
|
||||||
|
result.key = key
|
||||||
|
return result
|
||||||
|
|
||||||
|
def pop(self, key, default=None):
|
||||||
|
value = dict.pop(self, key, default)
|
||||||
|
result = self.configurator.convert(value)
|
||||||
|
if value is not result:
|
||||||
|
if type(result) in (ConvertingDict, ConvertingList,
|
||||||
|
ConvertingTuple):
|
||||||
|
result.parent = self
|
||||||
|
result.key = key
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
class ConvertingList(list):
|
||||||
|
"""A converting list wrapper."""
|
||||||
|
def __getitem__(self, key):
|
||||||
|
value = list.__getitem__(self, key)
|
||||||
|
result = self.configurator.convert(value)
|
||||||
|
# If the converted value is different, save for next time
|
||||||
|
if value is not result:
|
||||||
|
self[key] = result
|
||||||
|
if type(result) in (ConvertingDict, ConvertingList,
|
||||||
|
ConvertingTuple):
|
||||||
|
result.parent = self
|
||||||
|
result.key = key
|
||||||
|
return result
|
||||||
|
|
||||||
|
def pop(self, idx=-1):
|
||||||
|
value = list.pop(self, idx)
|
||||||
|
result = self.configurator.convert(value)
|
||||||
|
if value is not result:
|
||||||
|
if type(result) in (ConvertingDict, ConvertingList,
|
||||||
|
ConvertingTuple):
|
||||||
|
result.parent = self
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
class ConvertingTuple(tuple):
|
||||||
|
"""A converting tuple wrapper."""
|
||||||
|
def __getitem__(self, key):
|
||||||
|
value = tuple.__getitem__(self, key)
|
||||||
|
result = self.configurator.convert(value)
|
||||||
|
if value is not result:
|
||||||
|
if type(result) in (ConvertingDict, ConvertingList,
|
||||||
|
ConvertingTuple):
|
||||||
|
result.parent = self
|
||||||
|
result.key = key
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
class BaseConfigurator(object):
|
||||||
|
"""
|
||||||
|
The configurator base class which defines some useful defaults.
|
||||||
|
"""
|
||||||
|
|
||||||
|
CONVERT_PATTERN = re.compile(r'^(?P<prefix>[a-z]+)://(?P<suffix>.*)$')
|
||||||
|
|
||||||
|
WORD_PATTERN = re.compile(r'^\s*(\w+)\s*')
|
||||||
|
DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*')
|
||||||
|
INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*')
|
||||||
|
DIGIT_PATTERN = re.compile(r'^\d+$')
|
||||||
|
|
||||||
|
value_converters = {
|
||||||
|
'ext' : 'ext_convert',
|
||||||
|
'cfg' : 'cfg_convert',
|
||||||
|
}
|
||||||
|
|
||||||
|
# We might want to use a different one, e.g. importlib
|
||||||
|
importer = __import__
|
||||||
|
|
||||||
|
def __init__(self, config):
|
||||||
|
self.config = ConvertingDict(config)
|
||||||
|
self.config.configurator = self
|
||||||
|
|
||||||
|
def resolve(self, s):
|
||||||
|
"""
|
||||||
|
Resolve strings to objects using standard import and attribute
|
||||||
|
syntax.
|
||||||
|
"""
|
||||||
|
name = s.split('.')
|
||||||
|
used = name.pop(0)
|
||||||
|
try:
|
||||||
|
found = self.importer(used)
|
||||||
|
for frag in name:
|
||||||
|
used += '.' + frag
|
||||||
|
try:
|
||||||
|
found = getattr(found, frag)
|
||||||
|
except AttributeError:
|
||||||
|
self.importer(used)
|
||||||
|
found = getattr(found, frag)
|
||||||
|
return found
|
||||||
|
except ImportError:
|
||||||
|
e, tb = sys.exc_info()[1:]
|
||||||
|
v = ValueError('Cannot resolve %r: %s' % (s, e))
|
||||||
|
v.__cause__, v.__traceback__ = e, tb
|
||||||
|
raise v
|
||||||
|
|
||||||
|
def ext_convert(self, value):
|
||||||
|
"""Default converter for the ext:// protocol."""
|
||||||
|
return self.resolve(value)
|
||||||
|
|
||||||
|
def cfg_convert(self, value):
|
||||||
|
"""Default converter for the cfg:// protocol."""
|
||||||
|
rest = value
|
||||||
|
m = self.WORD_PATTERN.match(rest)
|
||||||
|
if m is None:
|
||||||
|
raise ValueError("Unable to convert %r" % value)
|
||||||
|
else:
|
||||||
|
rest = rest[m.end():]
|
||||||
|
d = self.config[m.groups()[0]]
|
||||||
|
# print d, rest
|
||||||
|
while rest:
|
||||||
|
m = self.DOT_PATTERN.match(rest)
|
||||||
|
if m:
|
||||||
|
d = d[m.groups()[0]]
|
||||||
|
else:
|
||||||
|
m = self.INDEX_PATTERN.match(rest)
|
||||||
|
if m:
|
||||||
|
idx = m.groups()[0]
|
||||||
|
if not self.DIGIT_PATTERN.match(idx):
|
||||||
|
d = d[idx]
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
n = int(idx) # try as number first (most likely)
|
||||||
|
d = d[n]
|
||||||
|
except TypeError:
|
||||||
|
d = d[idx]
|
||||||
|
if m:
|
||||||
|
rest = rest[m.end():]
|
||||||
|
else:
|
||||||
|
raise ValueError('Unable to convert '
|
||||||
|
'%r at %r' % (value, rest))
|
||||||
|
# rest should be empty
|
||||||
|
return d
|
||||||
|
|
||||||
|
def convert(self, value):
|
||||||
|
"""
|
||||||
|
Convert values to an appropriate type. dicts, lists and tuples are
|
||||||
|
replaced by their converting alternatives. Strings are checked to
|
||||||
|
see if they have a conversion format and are converted if they do.
|
||||||
|
"""
|
||||||
|
if not isinstance(value, ConvertingDict) and isinstance(value, dict):
|
||||||
|
value = ConvertingDict(value)
|
||||||
|
value.configurator = self
|
||||||
|
elif not isinstance(value, ConvertingList) and isinstance(value, list):
|
||||||
|
value = ConvertingList(value)
|
||||||
|
value.configurator = self
|
||||||
|
elif not isinstance(value, ConvertingTuple) and\
|
||||||
|
isinstance(value, tuple):
|
||||||
|
value = ConvertingTuple(value)
|
||||||
|
value.configurator = self
|
||||||
|
elif isinstance(value, six.string_types): # str for py3k
|
||||||
|
m = self.CONVERT_PATTERN.match(value)
|
||||||
|
if m:
|
||||||
|
d = m.groupdict()
|
||||||
|
prefix = d['prefix']
|
||||||
|
converter = self.value_converters.get(prefix, None)
|
||||||
|
if converter:
|
||||||
|
suffix = d['suffix']
|
||||||
|
converter = getattr(self, converter)
|
||||||
|
value = converter(suffix)
|
||||||
|
return value
|
||||||
|
|
||||||
|
def configure_custom(self, config):
|
||||||
|
"""Configure an object with a user-supplied factory."""
|
||||||
|
c = config.pop('()')
|
||||||
|
if not hasattr(c, '__call__') and hasattr(types, 'ClassType') and type(c) != types.ClassType:
|
||||||
|
c = self.resolve(c)
|
||||||
|
props = config.pop('.', None)
|
||||||
|
# Check for valid identifiers
|
||||||
|
kwargs = dict((k, config[k]) for k in config if valid_ident(k))
|
||||||
|
result = c(**kwargs)
|
||||||
|
if props:
|
||||||
|
for name, value in props.items():
|
||||||
|
setattr(result, name, value)
|
||||||
|
return result
|
||||||
|
|
||||||
|
def as_tuple(self, value):
|
||||||
|
"""Utility function which converts lists to tuples."""
|
||||||
|
if isinstance(value, list):
|
||||||
|
value = tuple(value)
|
||||||
|
return value
|
||||||
|
|
||||||
|
|
||||||
|
class DictConfigurator(BaseConfigurator):
|
||||||
|
"""
|
||||||
|
Configure logging using a dictionary-like object to describe the
|
||||||
|
configuration.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def configure(self):
|
||||||
|
"""Do the configuration."""
|
||||||
|
|
||||||
|
config = self.config
|
||||||
|
if 'version' not in config:
|
||||||
|
raise ValueError("dictionary doesn't specify a version")
|
||||||
|
if config['version'] != 1:
|
||||||
|
raise ValueError("Unsupported version: %s" % config['version'])
|
||||||
|
incremental = config.pop('incremental', False)
|
||||||
|
EMPTY_DICT = {}
|
||||||
|
logging._acquireLock()
|
||||||
|
try:
|
||||||
|
if incremental:
|
||||||
|
handlers = config.get('handlers', EMPTY_DICT)
|
||||||
|
# incremental handler config only if handler name
|
||||||
|
# ties in to logging._handlers (Python 2.7)
|
||||||
|
if sys.version_info[:2] == (2, 7):
|
||||||
|
for name in handlers:
|
||||||
|
if name not in logging._handlers:
|
||||||
|
raise ValueError('No handler found with '
|
||||||
|
'name %r' % name)
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
handler = logging._handlers[name]
|
||||||
|
handler_config = handlers[name]
|
||||||
|
level = handler_config.get('level', None)
|
||||||
|
if level:
|
||||||
|
handler.setLevel(_checkLevel(level))
|
||||||
|
except StandardError as e:
|
||||||
|
raise ValueError('Unable to configure handler '
|
||||||
|
'%r: %s' % (name, e))
|
||||||
|
loggers = config.get('loggers', EMPTY_DICT)
|
||||||
|
for name in loggers:
|
||||||
|
try:
|
||||||
|
self.configure_logger(name, loggers[name], True)
|
||||||
|
except StandardError as e:
|
||||||
|
raise ValueError('Unable to configure logger '
|
||||||
|
'%r: %s' % (name, e))
|
||||||
|
root = config.get('root', None)
|
||||||
|
if root:
|
||||||
|
try:
|
||||||
|
self.configure_root(root, True)
|
||||||
|
except StandardError as e:
|
||||||
|
raise ValueError('Unable to configure root '
|
||||||
|
'logger: %s' % e)
|
||||||
|
else:
|
||||||
|
disable_existing = config.pop('disable_existing_loggers', True)
|
||||||
|
|
||||||
|
logging._handlers.clear()
|
||||||
|
del logging._handlerList[:]
|
||||||
|
|
||||||
|
# Do formatters first - they don't refer to anything else
|
||||||
|
formatters = config.get('formatters', EMPTY_DICT)
|
||||||
|
for name in formatters:
|
||||||
|
try:
|
||||||
|
formatters[name] = self.configure_formatter(
|
||||||
|
formatters[name])
|
||||||
|
except StandardError as e:
|
||||||
|
raise ValueError('Unable to configure '
|
||||||
|
'formatter %r: %s' % (name, e))
|
||||||
|
# Next, do filters - they don't refer to anything else, either
|
||||||
|
filters = config.get('filters', EMPTY_DICT)
|
||||||
|
for name in filters:
|
||||||
|
try:
|
||||||
|
filters[name] = self.configure_filter(filters[name])
|
||||||
|
except StandardError as e:
|
||||||
|
raise ValueError('Unable to configure '
|
||||||
|
'filter %r: %s' % (name, e))
|
||||||
|
|
||||||
|
# Next, do handlers - they refer to formatters and filters
|
||||||
|
# As handlers can refer to other handlers, sort the keys
|
||||||
|
# to allow a deterministic order of configuration
|
||||||
|
handlers = config.get('handlers', EMPTY_DICT)
|
||||||
|
for name in sorted(handlers):
|
||||||
|
try:
|
||||||
|
handler = self.configure_handler(handlers[name])
|
||||||
|
handler.name = name
|
||||||
|
handlers[name] = handler
|
||||||
|
except StandardError as e:
|
||||||
|
raise ValueError('Unable to configure handler '
|
||||||
|
'%r: %s' % (name, e))
|
||||||
|
# Next, do loggers - they refer to handlers and filters
|
||||||
|
|
||||||
|
# we don't want to lose the existing loggers,
|
||||||
|
# since other threads may have pointers to them.
|
||||||
|
# existing is set to contain all existing loggers,
|
||||||
|
# and as we go through the new configuration we
|
||||||
|
# remove any which are configured. At the end,
|
||||||
|
# what's left in existing is the set of loggers
|
||||||
|
# which were in the previous configuration but
|
||||||
|
# which are not in the new configuration.
|
||||||
|
root = logging.root
|
||||||
|
existing = list(root.manager.loggerDict)
|
||||||
|
# The list needs to be sorted so that we can
|
||||||
|
# avoid disabling child loggers of explicitly
|
||||||
|
# named loggers. With a sorted list it is easier
|
||||||
|
# to find the child loggers.
|
||||||
|
existing.sort()
|
||||||
|
# We'll keep the list of existing loggers
|
||||||
|
# which are children of named loggers here...
|
||||||
|
child_loggers = []
|
||||||
|
# now set up the new ones...
|
||||||
|
loggers = config.get('loggers', EMPTY_DICT)
|
||||||
|
for name in loggers:
|
||||||
|
if name in existing:
|
||||||
|
i = existing.index(name)
|
||||||
|
prefixed = name + "."
|
||||||
|
pflen = len(prefixed)
|
||||||
|
num_existing = len(existing)
|
||||||
|
i = i + 1 # look at the entry after name
|
||||||
|
while (i < num_existing) and\
|
||||||
|
(existing[i][:pflen] == prefixed):
|
||||||
|
child_loggers.append(existing[i])
|
||||||
|
i = i + 1
|
||||||
|
existing.remove(name)
|
||||||
|
try:
|
||||||
|
self.configure_logger(name, loggers[name])
|
||||||
|
except StandardError as e:
|
||||||
|
raise ValueError('Unable to configure logger '
|
||||||
|
'%r: %s' % (name, e))
|
||||||
|
|
||||||
|
# Disable any old loggers. There's no point deleting
|
||||||
|
# them as other threads may continue to hold references
|
||||||
|
# and by disabling them, you stop them doing any logging.
|
||||||
|
# However, don't disable children of named loggers, as that's
|
||||||
|
# probably not what was intended by the user.
|
||||||
|
for log in existing:
|
||||||
|
logger = root.manager.loggerDict[log]
|
||||||
|
if log in child_loggers:
|
||||||
|
logger.level = logging.NOTSET
|
||||||
|
logger.handlers = []
|
||||||
|
logger.propagate = True
|
||||||
|
elif disable_existing:
|
||||||
|
logger.disabled = True
|
||||||
|
|
||||||
|
# And finally, do the root logger
|
||||||
|
root = config.get('root', None)
|
||||||
|
if root:
|
||||||
|
try:
|
||||||
|
self.configure_root(root)
|
||||||
|
except StandardError as e:
|
||||||
|
raise ValueError('Unable to configure root '
|
||||||
|
'logger: %s' % e)
|
||||||
|
finally:
|
||||||
|
logging._releaseLock()
|
||||||
|
|
||||||
|
def configure_formatter(self, config):
|
||||||
|
"""Configure a formatter from a dictionary."""
|
||||||
|
if '()' in config:
|
||||||
|
factory = config['()'] # for use in exception handler
|
||||||
|
try:
|
||||||
|
result = self.configure_custom(config)
|
||||||
|
except TypeError as te:
|
||||||
|
if "'format'" not in str(te):
|
||||||
|
raise
|
||||||
|
# Name of parameter changed from fmt to format.
|
||||||
|
# Retry with old name.
|
||||||
|
# This is so that code can be used with older Python versions
|
||||||
|
#(e.g. by Django)
|
||||||
|
config['fmt'] = config.pop('format')
|
||||||
|
config['()'] = factory
|
||||||
|
result = self.configure_custom(config)
|
||||||
|
else:
|
||||||
|
fmt = config.get('format', None)
|
||||||
|
dfmt = config.get('datefmt', None)
|
||||||
|
result = logging.Formatter(fmt, dfmt)
|
||||||
|
return result
|
||||||
|
|
||||||
|
def configure_filter(self, config):
|
||||||
|
"""Configure a filter from a dictionary."""
|
||||||
|
if '()' in config:
|
||||||
|
result = self.configure_custom(config)
|
||||||
|
else:
|
||||||
|
name = config.get('name', '')
|
||||||
|
result = logging.Filter(name)
|
||||||
|
return result
|
||||||
|
|
||||||
|
def add_filters(self, filterer, filters):
|
||||||
|
"""Add filters to a filterer from a list of names."""
|
||||||
|
for f in filters:
|
||||||
|
try:
|
||||||
|
filterer.addFilter(self.config['filters'][f])
|
||||||
|
except StandardError as e:
|
||||||
|
raise ValueError('Unable to add filter %r: %s' % (f, e))
|
||||||
|
|
||||||
|
def configure_handler(self, config):
|
||||||
|
"""Configure a handler from a dictionary."""
|
||||||
|
formatter = config.pop('formatter', None)
|
||||||
|
if formatter:
|
||||||
|
try:
|
||||||
|
formatter = self.config['formatters'][formatter]
|
||||||
|
except StandardError as e:
|
||||||
|
raise ValueError('Unable to set formatter '
|
||||||
|
'%r: %s' % (formatter, e))
|
||||||
|
level = config.pop('level', None)
|
||||||
|
filters = config.pop('filters', None)
|
||||||
|
if '()' in config:
|
||||||
|
c = config.pop('()')
|
||||||
|
if not hasattr(c, '__call__') and hasattr(types, 'ClassType') and type(c) != types.ClassType:
|
||||||
|
c = self.resolve(c)
|
||||||
|
factory = c
|
||||||
|
else:
|
||||||
|
klass = self.resolve(config.pop('class'))
|
||||||
|
# Special case for handler which refers to another handler
|
||||||
|
if issubclass(klass, logging.handlers.MemoryHandler) and\
|
||||||
|
'target' in config:
|
||||||
|
try:
|
||||||
|
config['target'] = self.config['handlers'][config['target']]
|
||||||
|
except StandardError as e:
|
||||||
|
raise ValueError('Unable to set target handler '
|
||||||
|
'%r: %s' % (config['target'], e))
|
||||||
|
elif issubclass(klass, logging.handlers.SMTPHandler) and\
|
||||||
|
'mailhost' in config:
|
||||||
|
config['mailhost'] = self.as_tuple(config['mailhost'])
|
||||||
|
elif issubclass(klass, logging.handlers.SysLogHandler) and\
|
||||||
|
'address' in config:
|
||||||
|
config['address'] = self.as_tuple(config['address'])
|
||||||
|
factory = klass
|
||||||
|
kwargs = dict((k, config[k]) for k in config if valid_ident(k))
|
||||||
|
try:
|
||||||
|
result = factory(**kwargs)
|
||||||
|
except TypeError as te:
|
||||||
|
if "'stream'" not in str(te):
|
||||||
|
raise
|
||||||
|
# The argument name changed from strm to stream
|
||||||
|
# Retry with old name.
|
||||||
|
# This is so that code can be used with older Python versions
|
||||||
|
#(e.g. by Django)
|
||||||
|
kwargs['strm'] = kwargs.pop('stream')
|
||||||
|
result = factory(**kwargs)
|
||||||
|
if formatter:
|
||||||
|
result.setFormatter(formatter)
|
||||||
|
if level is not None:
|
||||||
|
result.setLevel(_checkLevel(level))
|
||||||
|
if filters:
|
||||||
|
self.add_filters(result, filters)
|
||||||
|
return result
|
||||||
|
|
||||||
|
def add_handlers(self, logger, handlers):
|
||||||
|
"""Add handlers to a logger from a list of names."""
|
||||||
|
for h in handlers:
|
||||||
|
try:
|
||||||
|
logger.addHandler(self.config['handlers'][h])
|
||||||
|
except StandardError as e:
|
||||||
|
raise ValueError('Unable to add handler %r: %s' % (h, e))
|
||||||
|
|
||||||
|
def common_logger_config(self, logger, config, incremental=False):
|
||||||
|
"""
|
||||||
|
Perform configuration which is common to root and non-root loggers.
|
||||||
|
"""
|
||||||
|
level = config.get('level', None)
|
||||||
|
if level is not None:
|
||||||
|
logger.setLevel(_checkLevel(level))
|
||||||
|
if not incremental:
|
||||||
|
# Remove any existing handlers
|
||||||
|
for h in logger.handlers[:]:
|
||||||
|
logger.removeHandler(h)
|
||||||
|
handlers = config.get('handlers', None)
|
||||||
|
if handlers:
|
||||||
|
self.add_handlers(logger, handlers)
|
||||||
|
filters = config.get('filters', None)
|
||||||
|
if filters:
|
||||||
|
self.add_filters(logger, filters)
|
||||||
|
|
||||||
|
def configure_logger(self, name, config, incremental=False):
|
||||||
|
"""Configure a non-root logger from a dictionary."""
|
||||||
|
logger = logging.getLogger(name)
|
||||||
|
self.common_logger_config(logger, config, incremental)
|
||||||
|
propagate = config.get('propagate', None)
|
||||||
|
if propagate is not None:
|
||||||
|
logger.propagate = propagate
|
||||||
|
|
||||||
|
def configure_root(self, config, incremental=False):
|
||||||
|
"""Configure a root logger from a dictionary."""
|
||||||
|
root = logging.getLogger()
|
||||||
|
self.common_logger_config(root, config, incremental)
|
||||||
|
|
||||||
|
dictConfigClass = DictConfigurator
|
||||||
|
|
||||||
|
|
||||||
|
def dictConfig(config):
|
||||||
|
"""Configure logging using a dictionary."""
|
||||||
|
dictConfigClass(config).configure()
|
130
pip/download.py
130
pip/download.py
|
@ -1,8 +1,11 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
import cgi
|
import cgi
|
||||||
import email.utils
|
import email.utils
|
||||||
import hashlib
|
import hashlib
|
||||||
import getpass
|
import getpass
|
||||||
import json
|
import json
|
||||||
|
import logging
|
||||||
import mimetypes
|
import mimetypes
|
||||||
import os
|
import os
|
||||||
import platform
|
import platform
|
||||||
|
@ -15,11 +18,11 @@ import pip
|
||||||
|
|
||||||
from pip.compat import urllib, urlparse
|
from pip.compat import urllib, urlparse
|
||||||
from pip.exceptions import InstallationError, HashMismatch
|
from pip.exceptions import InstallationError, HashMismatch
|
||||||
from pip.util import (splitext, rmtree, format_size, display_path,
|
from pip.utils import (splitext, rmtree, format_size, display_path,
|
||||||
backup_dir, ask_path_exists, unpack_file)
|
backup_dir, ask_path_exists, unpack_file)
|
||||||
|
from pip.utils.ui import DownloadProgressBar, DownloadProgressSpinner
|
||||||
from pip.locations import write_delete_marker_file
|
from pip.locations import write_delete_marker_file
|
||||||
from pip.vcs import vcs
|
from pip.vcs import vcs
|
||||||
from pip.log import logger
|
|
||||||
from pip._vendor import requests, six
|
from pip._vendor import requests, six
|
||||||
from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter
|
from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter
|
||||||
from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
|
from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
|
||||||
|
@ -40,6 +43,9 @@ __all__ = ['get_file_content',
|
||||||
'unpack_http_url', 'unpack_url']
|
'unpack_http_url', 'unpack_url']
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
def user_agent():
|
def user_agent():
|
||||||
"""
|
"""
|
||||||
Return a string representing the user agent.
|
Return a string representing the user agent.
|
||||||
|
@ -408,16 +414,16 @@ def is_file_url(link):
|
||||||
|
|
||||||
def _check_hash(download_hash, link):
|
def _check_hash(download_hash, link):
|
||||||
if download_hash.digest_size != hashlib.new(link.hash_name).digest_size:
|
if download_hash.digest_size != hashlib.new(link.hash_name).digest_size:
|
||||||
logger.fatal(
|
logger.critical(
|
||||||
"Hash digest size of the package %d (%s) doesn't match the "
|
"Hash digest size of the package %d (%s) doesn't match the "
|
||||||
"expected hash name %s!" %
|
"expected hash name %s!",
|
||||||
(download_hash.digest_size, link, link.hash_name)
|
download_hash.digest_size, link, link.hash_name,
|
||||||
)
|
)
|
||||||
raise HashMismatch('Hash name mismatch for package %s' % link)
|
raise HashMismatch('Hash name mismatch for package %s' % link)
|
||||||
if download_hash.hexdigest() != link.hash:
|
if download_hash.hexdigest() != link.hash:
|
||||||
logger.fatal(
|
logger.critical(
|
||||||
"Hash of the package %s (%s) doesn't match the expected hash %s!" %
|
"Hash of the package %s (%s) doesn't match the expected hash %s!",
|
||||||
(link, download_hash.hexdigest(), link.hash)
|
link, download_hash.hexdigest(), link.hash,
|
||||||
)
|
)
|
||||||
raise HashMismatch(
|
raise HashMismatch(
|
||||||
'Bad %s hash for package %s' % (link.hash_name, link)
|
'Bad %s hash for package %s' % (link.hash_name, link)
|
||||||
|
@ -428,8 +434,8 @@ def _get_hash_from_file(target_file, link):
|
||||||
try:
|
try:
|
||||||
download_hash = hashlib.new(link.hash_name)
|
download_hash = hashlib.new(link.hash_name)
|
||||||
except (ValueError, TypeError):
|
except (ValueError, TypeError):
|
||||||
logger.warn(
|
logger.warning(
|
||||||
"Unsupported hash name %s for package %s" % (link.hash_name, link)
|
"Unsupported hash name %s for package %s", link.hash_name, link,
|
||||||
)
|
)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
@ -450,34 +456,29 @@ def _download_url(resp, link, temp_location):
|
||||||
try:
|
try:
|
||||||
download_hash = hashlib.new(link.hash_name)
|
download_hash = hashlib.new(link.hash_name)
|
||||||
except ValueError:
|
except ValueError:
|
||||||
logger.warn(
|
logger.warning(
|
||||||
"Unsupported hash name %s for package %s" %
|
"Unsupported hash name %s for package %s",
|
||||||
(link.hash_name, link)
|
link.hash_name, link,
|
||||||
)
|
)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
total_length = int(resp.headers['content-length'])
|
total_length = int(resp.headers['content-length'])
|
||||||
except (ValueError, KeyError, TypeError):
|
except (ValueError, KeyError, TypeError):
|
||||||
total_length = 0
|
total_length = 0
|
||||||
downloaded = 0
|
|
||||||
show_progress = total_length > 40 * 1000 or not total_length
|
cached_resp = getattr(resp, "from_cache", False)
|
||||||
|
|
||||||
|
if cached_resp:
|
||||||
|
show_progress = False
|
||||||
|
elif total_length > (40 * 1000):
|
||||||
|
show_progress = True
|
||||||
|
elif not total_length:
|
||||||
|
show_progress = True
|
||||||
|
else:
|
||||||
|
show_progress = False
|
||||||
|
|
||||||
show_url = link.show_url
|
show_url = link.show_url
|
||||||
try:
|
try:
|
||||||
if show_progress:
|
|
||||||
# FIXME: the URL can get really long in this message:
|
|
||||||
if total_length:
|
|
||||||
logger.start_progress(
|
|
||||||
'Downloading %s (%s): ' %
|
|
||||||
(show_url, format_size(total_length))
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
logger.start_progress(
|
|
||||||
'Downloading %s (unknown size): ' % show_url
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
logger.notify('Downloading %s' % show_url)
|
|
||||||
logger.info('Downloading from URL %s' % link)
|
|
||||||
|
|
||||||
def resp_read(chunk_size):
|
def resp_read(chunk_size):
|
||||||
try:
|
try:
|
||||||
# Special case for urllib3.
|
# Special case for urllib3.
|
||||||
|
@ -518,26 +519,32 @@ def _download_url(resp, link, temp_location):
|
||||||
break
|
break
|
||||||
yield chunk
|
yield chunk
|
||||||
|
|
||||||
for chunk in resp_read(4096):
|
progress_indicator = lambda x, *a, **k: x
|
||||||
downloaded += len(chunk)
|
|
||||||
if show_progress:
|
if show_progress: # We don't show progress on cached responses
|
||||||
if not total_length:
|
if total_length:
|
||||||
logger.show_progress('%s' % format_size(downloaded))
|
logger.info(
|
||||||
else:
|
"Downloading %s (%s)", show_url, format_size(total_length),
|
||||||
logger.show_progress(
|
)
|
||||||
'%3i%% %s' %
|
progress_indicator = DownloadProgressBar(
|
||||||
(
|
max=total_length,
|
||||||
100 * downloaded / total_length,
|
).iter
|
||||||
format_size(downloaded)
|
else:
|
||||||
)
|
logger.info("Downloading %s", show_url)
|
||||||
)
|
progress_indicator = DownloadProgressSpinner().iter
|
||||||
|
elif cached_resp:
|
||||||
|
logger.info("Using cached %s", show_url)
|
||||||
|
else:
|
||||||
|
logger.info("Downloading %s", show_url)
|
||||||
|
|
||||||
|
logger.debug('Downloading from URL %s', link)
|
||||||
|
|
||||||
|
for chunk in progress_indicator(resp_read(4096), 4096):
|
||||||
if download_hash is not None:
|
if download_hash is not None:
|
||||||
download_hash.update(chunk)
|
download_hash.update(chunk)
|
||||||
fp.write(chunk)
|
fp.write(chunk)
|
||||||
fp.close()
|
fp.close()
|
||||||
finally:
|
finally:
|
||||||
if show_progress:
|
|
||||||
logger.end_progress('%s downloaded' % format_size(downloaded))
|
|
||||||
if link.hash and link.hash_name:
|
if link.hash and link.hash_name:
|
||||||
_check_hash(download_hash, link)
|
_check_hash(download_hash, link)
|
||||||
return download_hash
|
return download_hash
|
||||||
|
@ -553,18 +560,19 @@ def _copy_file(filename, location, content_type, link):
|
||||||
if response == 'i':
|
if response == 'i':
|
||||||
copy = False
|
copy = False
|
||||||
elif response == 'w':
|
elif response == 'w':
|
||||||
logger.warn('Deleting %s' % display_path(download_location))
|
logger.warning('Deleting %s', display_path(download_location))
|
||||||
os.remove(download_location)
|
os.remove(download_location)
|
||||||
elif response == 'b':
|
elif response == 'b':
|
||||||
dest_file = backup_dir(download_location)
|
dest_file = backup_dir(download_location)
|
||||||
logger.warn(
|
logger.warning(
|
||||||
'Backing up %s to %s' %
|
'Backing up %s to %s',
|
||||||
(display_path(download_location), display_path(dest_file))
|
display_path(download_location),
|
||||||
|
display_path(dest_file),
|
||||||
)
|
)
|
||||||
shutil.move(download_location, dest_file)
|
shutil.move(download_location, dest_file)
|
||||||
if copy:
|
if copy:
|
||||||
shutil.copy(filename, download_location)
|
shutil.copy(filename, download_location)
|
||||||
logger.notify('Saved %s' % display_path(download_location))
|
logger.info('Saved %s', display_path(download_location))
|
||||||
|
|
||||||
|
|
||||||
def unpack_http_url(link, location, download_dir=None, session=None):
|
def unpack_http_url(link, location, download_dir=None, session=None):
|
||||||
|
@ -613,7 +621,7 @@ def unpack_file_url(link, location, download_dir=None):
|
||||||
rmtree(location)
|
rmtree(location)
|
||||||
shutil.copytree(link_path, location, symlinks=True)
|
shutil.copytree(link_path, location, symlinks=True)
|
||||||
if download_dir:
|
if download_dir:
|
||||||
logger.notify('Link is a directory, ignoring download_dir')
|
logger.info('Link is a directory, ignoring download_dir')
|
||||||
return
|
return
|
||||||
|
|
||||||
# if link has a hash, let's confirm it matches
|
# if link has a hash, let's confirm it matches
|
||||||
|
@ -663,8 +671,10 @@ class PipXmlrpcTransport(xmlrpc_client.Transport):
|
||||||
self.verbose = verbose
|
self.verbose = verbose
|
||||||
return self.parse_response(response.raw)
|
return self.parse_response(response.raw)
|
||||||
except requests.HTTPError as exc:
|
except requests.HTTPError as exc:
|
||||||
logger.fatal("HTTP error {0} while getting {1}".format(
|
logger.critical(
|
||||||
exc.response.status_code, url))
|
"HTTP error %s while getting %s",
|
||||||
|
exc.response.status_code, url,
|
||||||
|
)
|
||||||
raise
|
raise
|
||||||
|
|
||||||
|
|
||||||
|
@ -734,8 +744,9 @@ def _download_http_url(link, session, temp_dir):
|
||||||
)
|
)
|
||||||
resp.raise_for_status()
|
resp.raise_for_status()
|
||||||
except requests.HTTPError as exc:
|
except requests.HTTPError as exc:
|
||||||
logger.fatal("HTTP error %s while getting %s" %
|
logger.critical(
|
||||||
(exc.response.status_code, link))
|
"HTTP error %s while getting %s", exc.response.status_code, link,
|
||||||
|
)
|
||||||
raise
|
raise
|
||||||
|
|
||||||
content_type = resp.headers.get('content-type', '')
|
content_type = resp.headers.get('content-type', '')
|
||||||
|
@ -768,15 +779,16 @@ def _check_download_dir(link, download_dir):
|
||||||
download_path = os.path.join(download_dir, link.filename)
|
download_path = os.path.join(download_dir, link.filename)
|
||||||
if os.path.exists(download_path):
|
if os.path.exists(download_path):
|
||||||
# If already downloaded, does its hash match?
|
# If already downloaded, does its hash match?
|
||||||
logger.notify('File was already downloaded %s' % download_path)
|
logger.info('File was already downloaded %s', download_path)
|
||||||
if link.hash:
|
if link.hash:
|
||||||
download_hash = _get_hash_from_file(download_path, link)
|
download_hash = _get_hash_from_file(download_path, link)
|
||||||
try:
|
try:
|
||||||
_check_hash(download_hash, link)
|
_check_hash(download_hash, link)
|
||||||
except HashMismatch:
|
except HashMismatch:
|
||||||
logger.warn(
|
logger.warning(
|
||||||
'Previously-downloaded file %s has bad hash, '
|
'Previously-downloaded file %s has bad hash, '
|
||||||
're-downloading.' % download_path
|
're-downloading.',
|
||||||
|
download_path
|
||||||
)
|
)
|
||||||
os.unlink(download_path)
|
os.unlink(download_path)
|
||||||
return None
|
return None
|
||||||
|
|
|
@ -1,4 +1,5 @@
|
||||||
"""Exceptions used throughout package"""
|
"""Exceptions used throughout package"""
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
|
|
||||||
class PipError(Exception):
|
class PipError(Exception):
|
||||||
|
|
242
pip/index.py
242
pip/index.py
|
@ -1,13 +1,17 @@
|
||||||
"""Routines related to PyPI, indexes"""
|
"""Routines related to PyPI, indexes"""
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
|
import logging
|
||||||
import sys
|
import sys
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import mimetypes
|
import mimetypes
|
||||||
import posixpath
|
import posixpath
|
||||||
|
import warnings
|
||||||
|
|
||||||
from pip.log import logger
|
from pip.utils import Inf, normalize_name, splitext, is_prerelease
|
||||||
from pip.util import Inf, normalize_name, splitext, is_prerelease
|
from pip.utils.deprecation import RemovedInPip17Warning
|
||||||
|
from pip.utils.logging import indent_log
|
||||||
from pip.exceptions import (
|
from pip.exceptions import (
|
||||||
DistributionNotFound, BestVersionAlreadyInstalled, InvalidWheelFilename,
|
DistributionNotFound, BestVersionAlreadyInstalled, InvalidWheelFilename,
|
||||||
UnsupportedWheel,
|
UnsupportedWheel,
|
||||||
|
@ -29,6 +33,9 @@ INSECURE_SCHEMES = {
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class PackageFinder(object):
|
class PackageFinder(object):
|
||||||
"""This finds packages.
|
"""This finds packages.
|
||||||
|
|
||||||
|
@ -82,7 +89,6 @@ class PackageFinder(object):
|
||||||
|
|
||||||
# Do we process dependency links?
|
# Do we process dependency links?
|
||||||
self.process_dependency_links = process_dependency_links
|
self.process_dependency_links = process_dependency_links
|
||||||
self._have_warned_dependency_links = False
|
|
||||||
|
|
||||||
# The Session we'll use to make requests
|
# The Session we'll use to make requests
|
||||||
self.session = session
|
self.session = session
|
||||||
|
@ -93,13 +99,11 @@ class PackageFinder(object):
|
||||||
# # dependency_links value
|
# # dependency_links value
|
||||||
# # FIXME: also, we should track comes_from (i.e., use Link)
|
# # FIXME: also, we should track comes_from (i.e., use Link)
|
||||||
if self.process_dependency_links:
|
if self.process_dependency_links:
|
||||||
if not self._have_warned_dependency_links:
|
warnings.warn(
|
||||||
logger.deprecated(
|
"Dependency Links processing has been deprecated and will be "
|
||||||
"1.6",
|
"removed in a future release.",
|
||||||
"Dependency Links processing has been deprecated with an "
|
RemovedInPip17Warning,
|
||||||
"accelerated time schedule and will be removed in pip 1.6",
|
)
|
||||||
)
|
|
||||||
self._have_warned_dependency_links = True
|
|
||||||
self.dependency_links.extend(links)
|
self.dependency_links.extend(links)
|
||||||
|
|
||||||
def _sort_locations(self, locations):
|
def _sort_locations(self, locations):
|
||||||
|
@ -268,9 +272,9 @@ class PackageFinder(object):
|
||||||
# We explicitly do not trust links that came from dependency_links
|
# We explicitly do not trust links that came from dependency_links
|
||||||
locations.extend([Link(url) for url in _ulocations])
|
locations.extend([Link(url) for url in _ulocations])
|
||||||
|
|
||||||
logger.debug('URLs to search for versions for %s:' % req)
|
logger.debug('URLs to search for versions for %s:', req)
|
||||||
for location in locations:
|
for location in locations:
|
||||||
logger.debug('* %s' % location)
|
logger.debug('* %s', location)
|
||||||
self._warn_about_insecure_transport_scheme(logger, location)
|
self._warn_about_insecure_transport_scheme(logger, location)
|
||||||
|
|
||||||
found_versions = []
|
found_versions = []
|
||||||
|
@ -283,19 +287,16 @@ class PackageFinder(object):
|
||||||
)
|
)
|
||||||
page_versions = []
|
page_versions = []
|
||||||
for page in self._get_pages(locations, req):
|
for page in self._get_pages(locations, req):
|
||||||
logger.debug('Analyzing links from page %s' % page.url)
|
logger.debug('Analyzing links from page %s', page.url)
|
||||||
logger.indent += 2
|
with indent_log():
|
||||||
try:
|
|
||||||
page_versions.extend(
|
page_versions.extend(
|
||||||
self._package_versions(page.links, req.name.lower())
|
self._package_versions(page.links, req.name.lower())
|
||||||
)
|
)
|
||||||
finally:
|
|
||||||
logger.indent -= 2
|
|
||||||
dependency_versions = list(self._package_versions(
|
dependency_versions = list(self._package_versions(
|
||||||
[Link(url) for url in self.dependency_links], req.name.lower()))
|
[Link(url) for url in self.dependency_links], req.name.lower()))
|
||||||
if dependency_versions:
|
if dependency_versions:
|
||||||
logger.info(
|
logger.debug(
|
||||||
'dependency_links found: %s' %
|
'dependency_links found: %s',
|
||||||
', '.join([
|
', '.join([
|
||||||
link.url for p, link, version in dependency_versions
|
link.url for p, link, version in dependency_versions
|
||||||
])
|
])
|
||||||
|
@ -310,22 +311,25 @@ class PackageFinder(object):
|
||||||
and not page_versions
|
and not page_versions
|
||||||
and not dependency_versions
|
and not dependency_versions
|
||||||
and not file_versions):
|
and not file_versions):
|
||||||
logger.fatal(
|
logger.critical(
|
||||||
'Could not find any downloads that satisfy the requirement'
|
'Could not find any downloads that satisfy the requirement %s',
|
||||||
' %s' % req
|
req,
|
||||||
)
|
)
|
||||||
|
|
||||||
if self.need_warn_external:
|
if self.need_warn_external:
|
||||||
logger.warn(
|
logger.warning(
|
||||||
"Some externally hosted files were ignored as access to "
|
"Some externally hosted files were ignored as access to "
|
||||||
"them may be unreliable (use --allow-external %s to "
|
"them may be unreliable (use --allow-external %s to "
|
||||||
"allow)." % req.name
|
"allow).",
|
||||||
|
req.name,
|
||||||
)
|
)
|
||||||
|
|
||||||
if self.need_warn_unverified:
|
if self.need_warn_unverified:
|
||||||
logger.warn("Some insecure and unverifiable files were ignored"
|
logger.warning(
|
||||||
" (use --allow-unverified %s to allow)." %
|
"Some insecure and unverifiable files were ignored"
|
||||||
req.name)
|
" (use --allow-unverified %s to allow).",
|
||||||
|
req.name,
|
||||||
|
)
|
||||||
|
|
||||||
raise DistributionNotFound(
|
raise DistributionNotFound(
|
||||||
'No distributions at all found for %s' % req
|
'No distributions at all found for %s' % req
|
||||||
|
@ -339,8 +343,8 @@ class PackageFinder(object):
|
||||||
)]
|
)]
|
||||||
if file_versions:
|
if file_versions:
|
||||||
file_versions.sort(reverse=True)
|
file_versions.sort(reverse=True)
|
||||||
logger.info(
|
logger.debug(
|
||||||
'Local files found: %s' %
|
'Local files found: %s',
|
||||||
', '.join([
|
', '.join([
|
||||||
url_to_path(link.url)
|
url_to_path(link.url)
|
||||||
for _, link, _ in file_versions
|
for _, link, _ in file_versions
|
||||||
|
@ -352,13 +356,11 @@ class PackageFinder(object):
|
||||||
applicable_versions = []
|
applicable_versions = []
|
||||||
for (parsed_version, link, version) in all_versions:
|
for (parsed_version, link, version) in all_versions:
|
||||||
if version not in req.req:
|
if version not in req.req:
|
||||||
logger.info(
|
logger.debug(
|
||||||
"Ignoring link %s, version %s doesn't match %s" %
|
"Ignoring link %s, version %s doesn't match %s",
|
||||||
(
|
link,
|
||||||
link,
|
version,
|
||||||
version,
|
','.join([''.join(s) for s in req.req.specs]),
|
||||||
','.join([''.join(s) for s in req.req.specs])
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
continue
|
continue
|
||||||
elif (is_prerelease(version)
|
elif (is_prerelease(version)
|
||||||
|
@ -366,9 +368,11 @@ class PackageFinder(object):
|
||||||
# If this version isn't the already installed one, then
|
# If this version isn't the already installed one, then
|
||||||
# ignore it if it's a pre-release.
|
# ignore it if it's a pre-release.
|
||||||
if link is not INSTALLED_VERSION:
|
if link is not INSTALLED_VERSION:
|
||||||
logger.info(
|
logger.debug(
|
||||||
"Ignoring link %s, version %s is a pre-release (use "
|
"Ignoring link %s, version %s is a pre-release (use "
|
||||||
"--pre to allow)." % (link, version)
|
"--pre to allow).",
|
||||||
|
link,
|
||||||
|
version,
|
||||||
)
|
)
|
||||||
continue
|
continue
|
||||||
applicable_versions.append((parsed_version, link, version))
|
applicable_versions.append((parsed_version, link, version))
|
||||||
|
@ -380,80 +384,81 @@ class PackageFinder(object):
|
||||||
])
|
])
|
||||||
if not upgrade and existing_applicable:
|
if not upgrade and existing_applicable:
|
||||||
if applicable_versions[0][1] is INSTALLED_VERSION:
|
if applicable_versions[0][1] is INSTALLED_VERSION:
|
||||||
logger.info(
|
logger.debug(
|
||||||
'Existing installed version (%s) is most up-to-date and '
|
'Existing installed version (%s) is most up-to-date and '
|
||||||
'satisfies requirement' % req.satisfied_by.version
|
'satisfies requirement',
|
||||||
|
req.satisfied_by.version,
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
logger.info(
|
logger.debug(
|
||||||
'Existing installed version (%s) satisfies requirement '
|
'Existing installed version (%s) satisfies requirement '
|
||||||
'(most up-to-date version is %s)' %
|
'(most up-to-date version is %s)',
|
||||||
(req.satisfied_by.version, applicable_versions[0][2])
|
req.satisfied_by.version,
|
||||||
|
applicable_versions[0][2],
|
||||||
)
|
)
|
||||||
return None
|
return None
|
||||||
if not applicable_versions:
|
if not applicable_versions:
|
||||||
logger.fatal(
|
logger.critical(
|
||||||
'Could not find a version that satisfies the requirement %s '
|
'Could not find a version that satisfies the requirement %s '
|
||||||
'(from versions: %s)' %
|
'(from versions: %s)',
|
||||||
(
|
req,
|
||||||
req,
|
', '.join(
|
||||||
', '.join(
|
sorted(set([
|
||||||
sorted(set([
|
version
|
||||||
version
|
for parsed_version, link, version in all_versions
|
||||||
for parsed_version, link, version in all_versions
|
]))),
|
||||||
])))
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
|
|
||||||
if self.need_warn_external:
|
if self.need_warn_external:
|
||||||
logger.warn(
|
logger.warning(
|
||||||
"Some externally hosted files were ignored as access to "
|
"Some externally hosted files were ignored as access to "
|
||||||
"them may be unreliable (use --allow-external to allow)."
|
"them may be unreliable (use --allow-external to allow)."
|
||||||
)
|
)
|
||||||
|
|
||||||
if self.need_warn_unverified:
|
if self.need_warn_unverified:
|
||||||
logger.warn("Some insecure and unverifiable files were ignored"
|
logger.warning(
|
||||||
" (use --allow-unverified %s to allow)." %
|
"Some insecure and unverifiable files were ignored"
|
||||||
req.name)
|
" (use --allow-unverified %s to allow).",
|
||||||
|
req.name,
|
||||||
|
)
|
||||||
|
|
||||||
raise DistributionNotFound(
|
raise DistributionNotFound(
|
||||||
'No distributions matching the version for %s' % req
|
'No distributions matching the version for %s' % req
|
||||||
)
|
)
|
||||||
if applicable_versions[0][1] is INSTALLED_VERSION:
|
if applicable_versions[0][1] is INSTALLED_VERSION:
|
||||||
# We have an existing version, and its the best version
|
# We have an existing version, and its the best version
|
||||||
logger.info(
|
logger.debug(
|
||||||
'Installed version (%s) is most up-to-date (past versions: '
|
'Installed version (%s) is most up-to-date (past versions: '
|
||||||
'%s)' % (
|
'%s)',
|
||||||
req.satisfied_by.version,
|
req.satisfied_by.version,
|
||||||
', '.join([
|
', '.join([
|
||||||
version for parsed_version, link, version
|
version for parsed_version, link, version
|
||||||
in applicable_versions[1:]
|
in applicable_versions[1:]
|
||||||
]) or 'none'))
|
]) or 'none'),
|
||||||
raise BestVersionAlreadyInstalled
|
raise BestVersionAlreadyInstalled
|
||||||
if len(applicable_versions) > 1:
|
if len(applicable_versions) > 1:
|
||||||
logger.info(
|
logger.debug(
|
||||||
'Using version %s (newest of versions: %s)' %
|
'Using version %s (newest of versions: %s)',
|
||||||
(
|
applicable_versions[0][2],
|
||||||
applicable_versions[0][2],
|
', '.join([
|
||||||
', '.join([
|
version for parsed_version, link, version
|
||||||
version for parsed_version, link, version
|
in applicable_versions
|
||||||
in applicable_versions
|
])
|
||||||
])
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
|
|
||||||
selected_version = applicable_versions[0][1]
|
selected_version = applicable_versions[0][1]
|
||||||
|
|
||||||
if (selected_version.verifiable is not None
|
if (selected_version.verifiable is not None
|
||||||
and not selected_version.verifiable):
|
and not selected_version.verifiable):
|
||||||
logger.warn("%s is potentially insecure and "
|
logger.warning(
|
||||||
"unverifiable." % req.name)
|
"%s is potentially insecure and unverifiable.", req.name,
|
||||||
|
)
|
||||||
|
|
||||||
if selected_version._deprecated_regex:
|
if selected_version._deprecated_regex:
|
||||||
logger.deprecated(
|
warnings.warn(
|
||||||
"1.7",
|
"%s discovered using a deprecated method of parsing, in the "
|
||||||
"%s discovered using a deprecated method of parsing, "
|
"future it will no longer be discovered." % req.name,
|
||||||
"in the future it will no longer be discovered" % req.name
|
RemovedInPip17Warning,
|
||||||
)
|
)
|
||||||
|
|
||||||
return selected_version
|
return selected_version
|
||||||
|
@ -470,14 +475,14 @@ class PackageFinder(object):
|
||||||
index_url.url += '/'
|
index_url.url += '/'
|
||||||
page = self._get_page(index_url, req)
|
page = self._get_page(index_url, req)
|
||||||
if page is None:
|
if page is None:
|
||||||
logger.fatal('Cannot fetch index base URL %s' % index_url)
|
logger.critical('Cannot fetch index base URL %s', index_url)
|
||||||
return
|
return
|
||||||
norm_name = normalize_name(req.url_name)
|
norm_name = normalize_name(req.url_name)
|
||||||
for link in page.links:
|
for link in page.links:
|
||||||
base = posixpath.basename(link.path.rstrip('/'))
|
base = posixpath.basename(link.path.rstrip('/'))
|
||||||
if norm_name == normalize_name(base):
|
if norm_name == normalize_name(base):
|
||||||
logger.notify(
|
logger.debug(
|
||||||
'Real name of requirement %s is %s' % (url_name, base)
|
'Real name of requirement %s is %s', url_name, base,
|
||||||
)
|
)
|
||||||
return base
|
return base
|
||||||
return None
|
return None
|
||||||
|
@ -508,8 +513,11 @@ class PackageFinder(object):
|
||||||
if (normalized not in self.allow_external
|
if (normalized not in self.allow_external
|
||||||
and not self.allow_all_external):
|
and not self.allow_all_external):
|
||||||
self.need_warn_external = True
|
self.need_warn_external = True
|
||||||
logger.debug("Not searching %s for files because external "
|
logger.debug(
|
||||||
"urls are disallowed." % link)
|
"Not searching %s for files because external "
|
||||||
|
"urls are disallowed.",
|
||||||
|
link,
|
||||||
|
)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if (link.trusted is not None
|
if (link.trusted is not None
|
||||||
|
@ -518,7 +526,8 @@ class PackageFinder(object):
|
||||||
logger.debug(
|
logger.debug(
|
||||||
"Not searching %s for urls, it is an "
|
"Not searching %s for urls, it is an "
|
||||||
"untrusted link and cannot produce safe or "
|
"untrusted link and cannot produce safe or "
|
||||||
"verifiable files." % link
|
"verifiable files.",
|
||||||
|
link,
|
||||||
)
|
)
|
||||||
self.need_warn_unverified = True
|
self.need_warn_unverified = True
|
||||||
continue
|
continue
|
||||||
|
@ -573,7 +582,7 @@ class PackageFinder(object):
|
||||||
egg_info, ext = link.splitext()
|
egg_info, ext = link.splitext()
|
||||||
if not ext:
|
if not ext:
|
||||||
if link not in self.logged_links:
|
if link not in self.logged_links:
|
||||||
logger.debug('Skipping link %s; not a file' % link)
|
logger.debug('Skipping link %s; not a file', link)
|
||||||
self.logged_links.add(link)
|
self.logged_links.add(link)
|
||||||
return []
|
return []
|
||||||
if egg_info.endswith('.tar'):
|
if egg_info.endswith('.tar'):
|
||||||
|
@ -583,14 +592,15 @@ class PackageFinder(object):
|
||||||
if ext not in self._known_extensions():
|
if ext not in self._known_extensions():
|
||||||
if link not in self.logged_links:
|
if link not in self.logged_links:
|
||||||
logger.debug(
|
logger.debug(
|
||||||
'Skipping link %s; unknown archive format: %s' %
|
'Skipping link %s; unknown archive format: %s',
|
||||||
(link, ext)
|
link,
|
||||||
|
ext,
|
||||||
)
|
)
|
||||||
self.logged_links.add(link)
|
self.logged_links.add(link)
|
||||||
return []
|
return []
|
||||||
if "macosx10" in link.path and ext == '.zip':
|
if "macosx10" in link.path and ext == '.zip':
|
||||||
if link not in self.logged_links:
|
if link not in self.logged_links:
|
||||||
logger.debug('Skipping link %s; macosx10 one' % (link))
|
logger.debug('Skipping link %s; macosx10 one', link)
|
||||||
self.logged_links.add(link)
|
self.logged_links.add(link)
|
||||||
return []
|
return []
|
||||||
if ext == wheel_ext:
|
if ext == wheel_ext:
|
||||||
|
@ -598,20 +608,22 @@ class PackageFinder(object):
|
||||||
wheel = Wheel(link.filename)
|
wheel = Wheel(link.filename)
|
||||||
except InvalidWheelFilename:
|
except InvalidWheelFilename:
|
||||||
logger.debug(
|
logger.debug(
|
||||||
'Skipping %s because the wheel filename is invalid' %
|
'Skipping %s because the wheel filename is invalid',
|
||||||
link
|
link
|
||||||
)
|
)
|
||||||
return []
|
return []
|
||||||
if wheel.name.lower() != search_name.lower():
|
if wheel.name.lower() != search_name.lower():
|
||||||
logger.debug(
|
logger.debug(
|
||||||
'Skipping link %s; wrong project name (not %s)' %
|
'Skipping link %s; wrong project name (not %s)',
|
||||||
(link, search_name)
|
link,
|
||||||
|
search_name,
|
||||||
)
|
)
|
||||||
return []
|
return []
|
||||||
if not wheel.supported():
|
if not wheel.supported():
|
||||||
logger.debug(
|
logger.debug(
|
||||||
'Skipping %s because it is not compatible with this '
|
'Skipping %s because it is not compatible with this '
|
||||||
'Python' % link
|
'Python',
|
||||||
|
link,
|
||||||
)
|
)
|
||||||
return []
|
return []
|
||||||
# This is a dirty hack to prevent installing Binary Wheels from
|
# This is a dirty hack to prevent installing Binary Wheels from
|
||||||
|
@ -634,7 +646,8 @@ class PackageFinder(object):
|
||||||
if not wheel.supported(tags=supported_tags_noarch):
|
if not wheel.supported(tags=supported_tags_noarch):
|
||||||
logger.debug(
|
logger.debug(
|
||||||
"Skipping %s because it is a pypi-hosted binary "
|
"Skipping %s because it is a pypi-hosted binary "
|
||||||
"Wheel on an unsupported platform" % link
|
"Wheel on an unsupported platform",
|
||||||
|
link,
|
||||||
)
|
)
|
||||||
return []
|
return []
|
||||||
version = wheel.version
|
version = wheel.version
|
||||||
|
@ -643,8 +656,9 @@ class PackageFinder(object):
|
||||||
version = self._egg_info_matches(egg_info, search_name, link)
|
version = self._egg_info_matches(egg_info, search_name, link)
|
||||||
if version is None:
|
if version is None:
|
||||||
logger.debug(
|
logger.debug(
|
||||||
'Skipping link %s; wrong project name (not %s)' %
|
'Skipping link %s; wrong project name (not %s)',
|
||||||
(link, search_name)
|
link,
|
||||||
|
search_name,
|
||||||
)
|
)
|
||||||
return []
|
return []
|
||||||
|
|
||||||
|
@ -655,7 +669,7 @@ class PackageFinder(object):
|
||||||
and not self.allow_all_external):
|
and not self.allow_all_external):
|
||||||
# We have a link that we are sure is external, so we should skip
|
# We have a link that we are sure is external, so we should skip
|
||||||
# it unless we are allowing externals
|
# it unless we are allowing externals
|
||||||
logger.debug("Skipping %s because it is externally hosted." % link)
|
logger.debug("Skipping %s because it is externally hosted.", link)
|
||||||
self.need_warn_external = True
|
self.need_warn_external = True
|
||||||
return []
|
return []
|
||||||
|
|
||||||
|
@ -666,8 +680,10 @@ class PackageFinder(object):
|
||||||
# We have a link that we are sure we cannot verify its integrity,
|
# We have a link that we are sure we cannot verify its integrity,
|
||||||
# so we should skip it unless we are allowing unsafe installs
|
# so we should skip it unless we are allowing unsafe installs
|
||||||
# for this requirement.
|
# for this requirement.
|
||||||
logger.debug("Skipping %s because it is an insecure and "
|
logger.debug(
|
||||||
"unverifiable file." % link)
|
"Skipping %s because it is an insecure and unverifiable file.",
|
||||||
|
link,
|
||||||
|
)
|
||||||
self.need_warn_unverified = True
|
self.need_warn_unverified = True
|
||||||
return []
|
return []
|
||||||
|
|
||||||
|
@ -677,10 +693,10 @@ class PackageFinder(object):
|
||||||
py_version = match.group(1)
|
py_version = match.group(1)
|
||||||
if py_version != sys.version[:3]:
|
if py_version != sys.version[:3]:
|
||||||
logger.debug(
|
logger.debug(
|
||||||
'Skipping %s because Python version is incorrect' % link
|
'Skipping %s because Python version is incorrect', link
|
||||||
)
|
)
|
||||||
return []
|
return []
|
||||||
logger.debug('Found link %s, version: %s' % (link, version))
|
logger.debug('Found link %s, version: %s', link, version)
|
||||||
return [(
|
return [(
|
||||||
pkg_resources.parse_version(version),
|
pkg_resources.parse_version(version),
|
||||||
link,
|
link,
|
||||||
|
@ -690,7 +706,7 @@ class PackageFinder(object):
|
||||||
def _egg_info_matches(self, egg_info, search_name, link):
|
def _egg_info_matches(self, egg_info, search_name, link):
|
||||||
match = self._egg_info_re.search(egg_info)
|
match = self._egg_info_re.search(egg_info)
|
||||||
if not match:
|
if not match:
|
||||||
logger.debug('Could not parse version from link: %s' % link)
|
logger.debug('Could not parse version from link: %s', link)
|
||||||
return None
|
return None
|
||||||
name = match.group(0).lower()
|
name = match.group(0).lower()
|
||||||
# To match the "safe" name that pkg_resources creates:
|
# To match the "safe" name that pkg_resources creates:
|
||||||
|
@ -741,9 +757,7 @@ class HTMLPage(object):
|
||||||
from pip.vcs import VcsSupport
|
from pip.vcs import VcsSupport
|
||||||
for scheme in VcsSupport.schemes:
|
for scheme in VcsSupport.schemes:
|
||||||
if url.lower().startswith(scheme) and url[len(scheme)] in '+:':
|
if url.lower().startswith(scheme) and url[len(scheme)] in '+:':
|
||||||
logger.debug(
|
logger.debug('Cannot look at %s URL %s', scheme, link)
|
||||||
'Cannot look at %(scheme)s URL %(link)s' % locals()
|
|
||||||
)
|
|
||||||
return None
|
return None
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
@ -758,12 +772,13 @@ class HTMLPage(object):
|
||||||
break
|
break
|
||||||
else:
|
else:
|
||||||
logger.debug(
|
logger.debug(
|
||||||
'Skipping page %s because of Content-Type: '
|
'Skipping page %s because of Content-Type: %s',
|
||||||
'%s' % (link, content_type)
|
link,
|
||||||
|
content_type,
|
||||||
)
|
)
|
||||||
return
|
return
|
||||||
|
|
||||||
logger.debug('Getting page %s' % url)
|
logger.debug('Getting page %s', url)
|
||||||
|
|
||||||
# Tack index.html onto file:// URLs that point to directories
|
# Tack index.html onto file:// URLs that point to directories
|
||||||
(scheme, netloc, path, params, query, fragment) = \
|
(scheme, netloc, path, params, query, fragment) = \
|
||||||
|
@ -774,7 +789,7 @@ class HTMLPage(object):
|
||||||
if not url.endswith('/'):
|
if not url.endswith('/'):
|
||||||
url += '/'
|
url += '/'
|
||||||
url = urlparse.urljoin(url, 'index.html')
|
url = urlparse.urljoin(url, 'index.html')
|
||||||
logger.debug(' file: URL is directory, getting %s' % url)
|
logger.debug(' file: URL is directory, getting %s', url)
|
||||||
|
|
||||||
resp = session.get(
|
resp = session.get(
|
||||||
url,
|
url,
|
||||||
|
@ -793,8 +808,9 @@ class HTMLPage(object):
|
||||||
content_type = resp.headers.get('Content-Type', 'unknown')
|
content_type = resp.headers.get('Content-Type', 'unknown')
|
||||||
if not content_type.lower().startswith("text/html"):
|
if not content_type.lower().startswith("text/html"):
|
||||||
logger.debug(
|
logger.debug(
|
||||||
'Skipping page %s because of Content-Type: %s' %
|
'Skipping page %s because of Content-Type: %s',
|
||||||
(link, content_type)
|
link,
|
||||||
|
content_type,
|
||||||
)
|
)
|
||||||
return
|
return
|
||||||
|
|
||||||
|
@ -814,7 +830,7 @@ class HTMLPage(object):
|
||||||
cls._handle_fail(
|
cls._handle_fail(
|
||||||
req, link, reason, url,
|
req, link, reason, url,
|
||||||
level=2,
|
level=2,
|
||||||
meth=logger.notify,
|
meth=logger.info,
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
return inst
|
return inst
|
||||||
|
@ -822,7 +838,7 @@ class HTMLPage(object):
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _handle_fail(req, link, reason, url, level=1, meth=None):
|
def _handle_fail(req, link, reason, url, level=1, meth=None):
|
||||||
if meth is None:
|
if meth is None:
|
||||||
meth = logger.info
|
meth = logger.debug
|
||||||
|
|
||||||
meth("Could not fetch URL %s: %s", link, reason)
|
meth("Could not fetch URL %s: %s", link, reason)
|
||||||
meth("Will skip URL %s when looking for download links for %s" %
|
meth("Will skip URL %s when looking for download links for %s" %
|
||||||
|
|
|
@ -1,4 +1,5 @@
|
||||||
"""Locations where we look for configs, install stuff, etc"""
|
"""Locations where we look for configs, install stuff, etc"""
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
import getpass
|
import getpass
|
||||||
import os
|
import os
|
||||||
|
@ -184,7 +185,6 @@ if WINDOWS:
|
||||||
default_storage_dir,
|
default_storage_dir,
|
||||||
default_config_basename,
|
default_config_basename,
|
||||||
)
|
)
|
||||||
default_log_file = os.path.join(default_storage_dir, 'pip.log')
|
|
||||||
else:
|
else:
|
||||||
bin_py = os.path.join(sys.prefix, 'bin')
|
bin_py = os.path.join(sys.prefix, 'bin')
|
||||||
bin_user = os.path.join(user_site, 'bin')
|
bin_user = os.path.join(user_site, 'bin')
|
||||||
|
@ -194,13 +194,11 @@ else:
|
||||||
default_storage_dir,
|
default_storage_dir,
|
||||||
default_config_basename,
|
default_config_basename,
|
||||||
)
|
)
|
||||||
default_log_file = os.path.join(default_storage_dir, 'pip.log')
|
|
||||||
|
|
||||||
# Forcing to use /usr/local/bin for standard Mac OS X framework installs
|
# Forcing to use /usr/local/bin for standard Mac OS X framework installs
|
||||||
# Also log to ~/Library/Logs/ for use with the Console.app log viewer
|
# Also log to ~/Library/Logs/ for use with the Console.app log viewer
|
||||||
if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/':
|
if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/':
|
||||||
bin_py = '/usr/local/bin'
|
bin_py = '/usr/local/bin'
|
||||||
default_log_file = os.path.join(user_dir, 'Library/Logs/pip.log')
|
|
||||||
|
|
||||||
site_config_files = [
|
site_config_files = [
|
||||||
os.path.join(path, default_config_basename)
|
os.path.join(path, default_config_basename)
|
||||||
|
|
297
pip/log.py
297
pip/log.py
|
@ -1,297 +0,0 @@
|
||||||
"""Logging
|
|
||||||
"""
|
|
||||||
|
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
import logging
|
|
||||||
|
|
||||||
from pip._vendor import colorama, pkg_resources
|
|
||||||
from pip.compat import WINDOWS
|
|
||||||
|
|
||||||
|
|
||||||
def _color_wrap(*colors):
|
|
||||||
def wrapped(inp):
|
|
||||||
return "".join(list(colors) + [inp, colorama.Style.RESET_ALL])
|
|
||||||
return wrapped
|
|
||||||
|
|
||||||
|
|
||||||
def should_color(consumer, environ, std=(sys.stdout, sys.stderr)):
|
|
||||||
real_consumer = (
|
|
||||||
consumer if not isinstance(consumer, colorama.AnsiToWin32)
|
|
||||||
else consumer.wrapped
|
|
||||||
)
|
|
||||||
|
|
||||||
# If consumer isn't stdout or stderr we shouldn't colorize it
|
|
||||||
if real_consumer not in std:
|
|
||||||
return False
|
|
||||||
|
|
||||||
# If consumer is a tty we should color it
|
|
||||||
if hasattr(real_consumer, "isatty") and real_consumer.isatty():
|
|
||||||
return True
|
|
||||||
|
|
||||||
# If we have an ASNI term we should color it
|
|
||||||
if environ.get("TERM") == "ANSI":
|
|
||||||
return True
|
|
||||||
|
|
||||||
# If anything else we should not color it
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
def should_warn(current_version, removal_version):
|
|
||||||
# Our Significant digits on versions is 2, so remove everything but the
|
|
||||||
# first two places.
|
|
||||||
current_version = ".".join(current_version.split(".")[:2])
|
|
||||||
removal_version = ".".join(removal_version.split(".")[:2])
|
|
||||||
|
|
||||||
# Our warning threshold is one minor version before removal, so we
|
|
||||||
# decrement the minor version by one
|
|
||||||
major, minor = removal_version.split(".")
|
|
||||||
minor = str(int(minor) - 1)
|
|
||||||
warn_version = ".".join([major, minor])
|
|
||||||
|
|
||||||
# Test if our current_version should be a warn
|
|
||||||
return (pkg_resources.parse_version(current_version)
|
|
||||||
< pkg_resources.parse_version(warn_version))
|
|
||||||
|
|
||||||
|
|
||||||
def sanitize(write_content, encoding=sys.getdefaultencoding()):
    """Replace characters in *write_content* that cannot be represented
    in *encoding* (important for certain values of LANG).

    On Python 2 the text is returned untouched; on Python 3 it is
    round-tripped through *encoding* with 'replace' error handling.
    """
    if sys.version_info[0] == 2:
        return write_content
    encoded = write_content.encode(encoding, 'replace')
    return encoded.decode(encoding)
|
|
||||||
|
|
||||||
|
|
||||||
class Logger(object):
    """
    Logging object for use in command-line script.  Allows ranges of
    levels, to avoid some redundancy of displayed information.

    Consumers are registered as ``(level, consumer)`` pairs, where a
    consumer is either a writable stream (e.g. ``sys.stdout``) or a
    callable that receives the rendered line.
    """

    # Numeric log levels.  VERBOSE_DEBUG sits just below stdlib DEBUG and
    # NOTIFY halfway between INFO and WARN.  Floor division keeps NOTIFY
    # an int on Python 3 (true division would produce the float 25.0);
    # on Python 2 int/int already floors, so behavior there is unchanged.
    VERBOSE_DEBUG = logging.DEBUG - 1
    DEBUG = logging.DEBUG
    INFO = logging.INFO
    NOTIFY = (logging.INFO + logging.WARN) // 2
    WARN = WARNING = logging.WARN
    ERROR = logging.ERROR
    FATAL = logging.FATAL

    # All levels ordered from most verbose to least; used by
    # level_for_integer to map -v/-q counts onto a level.
    LEVELS = [VERBOSE_DEBUG, DEBUG, INFO, NOTIFY, WARN, ERROR, FATAL]

    # Colorizers applied to rendered lines for these levels when the
    # consumer supports ANSI colors (see should_color).
    COLORS = {
        WARN: _color_wrap(colorama.Fore.YELLOW),
        ERROR: _color_wrap(colorama.Fore.RED),
        FATAL: _color_wrap(colorama.Fore.RED),
    }

    def __init__(self):
        self.consumers = []
        self.indent = 0
        self.explicit_levels = False
        # Message of the currently-open progress scope, if any.
        self.in_progress = None
        # True while a progress line has been written without a trailing
        # newline (the next regular message must first emit one).
        self.in_progress_hanging = False

    def add_consumers(self, *consumers):
        """Register ``(level, consumer)`` pairs, skipping duplicates."""
        for level, consumer in consumers:
            # Try to check for duplicate consumers before adding them
            for chk_level, chk_consumer in self.consumers:
                # Account for coloroma wrapped streams
                if isinstance(chk_consumer, colorama.AnsiToWin32):
                    chk_consumer = chk_consumer.wrapped

                if (level, consumer) == (chk_level, chk_consumer):
                    break
            # If we didn't find a duplicate, then add it
            else:
                # Colorize consumer for Windows
                if WINDOWS and hasattr(consumer, 'write'):
                    consumer = colorama.AnsiToWin32(consumer)

                self.consumers.append((level, consumer))

    def debug(self, msg, *args, **kw):
        self.log(self.DEBUG, msg, *args, **kw)

    def info(self, msg, *args, **kw):
        self.log(self.INFO, msg, *args, **kw)

    def notify(self, msg, *args, **kw):
        self.log(self.NOTIFY, msg, *args, **kw)

    def warn(self, msg, *args, **kw):
        self.log(self.WARN, msg, *args, **kw)

    def error(self, msg, *args, **kw):
        self.log(self.ERROR, msg, *args, **kw)

    def fatal(self, msg, *args, **kw):
        self.log(self.FATAL, msg, *args, **kw)

    def deprecated(self, removal_version, msg, *args, **kwargs):
        """
        Logs deprecation message which is log level WARN if the
        ``removal_version`` is > 1 minor release away and log level ERROR
        otherwise.

        removal_version should be the version that the deprecated feature is
        expected to be removed in, so something that will not exist in
        version 1.7, but will in 1.6 would have a removal_version of 1.7.
        """
        from pip import __version__

        if should_warn(__version__, removal_version):
            self.warn(msg, *args, **kwargs)
        else:
            self.error(msg, *args, **kwargs)

    def log(self, level, msg, *args, **kw):
        """Render *msg* (with %-style args) and dispatch it to every
        consumer whose registered level matches *level*."""
        if args:
            if kw:
                raise TypeError(
                    "You may give positional or keyword arguments, not both")
        args = args or kw

        # render
        if args:
            rendered = msg % args
        else:
            rendered = msg
        rendered = ' ' * self.indent + rendered
        if self.explicit_levels:
            # FIXME: should this be a name, not a level number?
            rendered = '%02i %s' % (level, rendered)

        for consumer_level, consumer in self.consumers:
            if self.level_matches(level, consumer_level):
                # Terminate a hanging progress line before writing a
                # regular message to the console.
                if (self.in_progress_hanging
                        and consumer in (sys.stdout, sys.stderr)):
                    self.in_progress_hanging = False
                    sys.stdout.write('\n')
                    sys.stdout.flush()
                if hasattr(consumer, 'write'):
                    write_content = rendered + '\n'
                    if should_color(consumer, os.environ):
                        # We are printing to stdout or stderr and it supports
                        # colors so render our text colored
                        colorizer = self.COLORS.get(level, lambda x: x)
                        write_content = colorizer(write_content)

                    consumer.write(sanitize(write_content))
                    if hasattr(consumer, 'flush'):
                        consumer.flush()
                else:
                    # Non-stream consumers are plain callables.
                    consumer(rendered)

    def _show_progress(self):
        """Should we display download progress?"""
        return (self.stdout_level_matches(self.NOTIFY) and sys.stdout.isatty())

    def start_progress(self, msg):
        """Open a progress scope; only one may be active at a time."""
        assert not self.in_progress, (
            "Tried to start_progress(%r) while in_progress %r"
            % (msg, self.in_progress))
        if self._show_progress():
            sys.stdout.write(' ' * self.indent + msg)
            sys.stdout.flush()
            self.in_progress_hanging = True
        else:
            self.in_progress_hanging = False
        self.in_progress = msg
        self.last_message = None

    def end_progress(self, msg='done.'):
        """Close the current progress scope, writing *msg* as the result."""
        assert self.in_progress, (
            "Tried to end_progress without start_progress")
        if self._show_progress():
            if not self.in_progress_hanging:
                # Some message has been printed out since start_progress
                sys.stdout.write('...' + self.in_progress + msg + '\n')
                sys.stdout.flush()
            else:
                # These erase any messages shown with show_progress
                # (besides .'s)
                logger.show_progress('')
                logger.show_progress('')
                sys.stdout.write(msg + '\n')
                sys.stdout.flush()
        self.in_progress = None
        self.in_progress_hanging = False

    def show_progress(self, message=None):
        """If we are in a progress scope, and no log messages have been
        shown, write out another '.'"""
        if self.in_progress_hanging:
            if message is None:
                sys.stdout.write('.')
                sys.stdout.flush()
            else:
                # Pad with spaces so a shorter message fully overwrites
                # the previous one after the carriage return.
                if self.last_message:
                    padding = ' ' * max(
                        0,
                        len(self.last_message) - len(message)
                    )
                else:
                    padding = ''
                sys.stdout.write(
                    '\r%s%s%s%s' %
                    (' ' * self.indent, self.in_progress, message, padding)
                )
                sys.stdout.flush()
                self.last_message = message

    def stdout_level_matches(self, level):
        """Returns true if a message at this level will go to stdout"""
        return self.level_matches(level, self._stdout_level())

    def _stdout_level(self):
        """Returns the level that stdout runs at"""
        for level, consumer in self.consumers:
            if consumer is sys.stdout:
                return level
        # No stdout consumer registered: effectively only FATAL "matches".
        return self.FATAL

    def level_matches(self, level, consumer_level):
        """
        >>> l = Logger()
        >>> l.level_matches(3, 4)
        False
        >>> l.level_matches(3, 2)
        True
        >>> l.level_matches(slice(None, 3), 3)
        False
        >>> l.level_matches(slice(None, 3), 2)
        True
        >>> l.level_matches(slice(1, 3), 1)
        True
        >>> l.level_matches(slice(2, 3), 1)
        False
        """
        if isinstance(level, slice):
            start, stop = level.start, level.stop
            if start is not None and start > consumer_level:
                return False
            # BUG FIX: was ``stop is not None or stop <= consumer_level``,
            # which rejects every bounded slice (contradicting the doctests
            # above) and raises TypeError on Python 3 when stop is None.
            if stop is not None and stop <= consumer_level:
                return False
            return True
        else:
            return level >= consumer_level

    @classmethod
    def level_for_integer(cls, level):
        """Map an integer verbosity index onto a level, clamping to the
        ends of LEVELS."""
        levels = cls.LEVELS
        if level < 0:
            return levels[0]
        if level >= len(levels):
            return levels[-1]
        return levels[level]

    def move_stdout_to_stderr(self):
        """Re-register every stdout consumer as a stderr consumer."""
        to_remove = []
        to_add = []
        for consumer_level, consumer in self.consumers:
            if consumer == sys.stdout:
                to_remove.append((consumer_level, consumer))
                to_add.append((consumer_level, sys.stderr))
        for item in to_remove:
            self.consumers.remove(item)
        self.consumers.extend(to_add)
|
|
||||||
|
|
||||||
# Module-level singleton; the rest of pip imports and logs through this.
logger = Logger()
|
|
|
@ -1,4 +1,5 @@
|
||||||
"""Generate and work with PEP 425 Compatibility Tags."""
|
"""Generate and work with PEP 425 Compatibility Tags."""
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
|
|
|
@ -1,3 +1,4 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
from .req_install import InstallRequirement
|
from .req_install import InstallRequirement
|
||||||
from .req_set import RequirementSet, Requirements
|
from .req_set import RequirementSet, Requirements
|
||||||
|
|
|
@ -1,10 +1,12 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
|
|
||||||
from pip.compat import urlparse
|
from pip.compat import urlparse
|
||||||
from pip.download import get_file_content
|
from pip.download import get_file_content
|
||||||
from pip.req.req_install import InstallRequirement
|
from pip.req.req_install import InstallRequirement
|
||||||
from pip.util import normalize_name
|
from pip.utils import normalize_name
|
||||||
|
|
||||||
_scheme_re = re.compile(r'^(http|https|file):', re.I)
|
_scheme_re = re.compile(r'^(http|https|file):', re.I)
|
||||||
|
|
||||||
|
|
|
@ -1,14 +1,19 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
|
import logging
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import shutil
|
import shutil
|
||||||
import sys
|
import sys
|
||||||
import tempfile
|
import tempfile
|
||||||
import zipfile
|
import zipfile
|
||||||
|
|
||||||
from distutils.util import change_root
|
from distutils.util import change_root
|
||||||
from distutils import sysconfig
|
from distutils import sysconfig
|
||||||
from email.parser import FeedParser
|
from email.parser import FeedParser
|
||||||
|
|
||||||
import pip.wheel
|
import pip.wheel
|
||||||
|
|
||||||
from pip._vendor import pkg_resources, six
|
from pip._vendor import pkg_resources, six
|
||||||
from pip._vendor.six.moves import configparser
|
from pip._vendor.six.moves import configparser
|
||||||
from pip.compat import urllib, native_str, WINDOWS
|
from pip.compat import urllib, native_str, WINDOWS
|
||||||
|
@ -20,17 +25,20 @@ from pip.index import Link
|
||||||
from pip.locations import (
|
from pip.locations import (
|
||||||
bin_py, running_under_virtualenv, PIP_DELETE_MARKER_FILENAME, bin_user,
|
bin_py, running_under_virtualenv, PIP_DELETE_MARKER_FILENAME, bin_user,
|
||||||
)
|
)
|
||||||
from pip.log import logger
|
from pip.utils import (
|
||||||
from pip.util import (
|
|
||||||
display_path, rmtree, ask_path_exists, backup_dir, is_installable_dir,
|
display_path, rmtree, ask_path_exists, backup_dir, is_installable_dir,
|
||||||
dist_in_usersite, dist_in_site_packages, egg_link_path, make_path_relative,
|
dist_in_usersite, dist_in_site_packages, egg_link_path, make_path_relative,
|
||||||
call_subprocess, is_prerelease, read_text_file, FakeFile, _make_build_dir,
|
call_subprocess, is_prerelease, read_text_file, FakeFile, _make_build_dir,
|
||||||
)
|
)
|
||||||
|
from pip.utils.logging import indent_log
|
||||||
from pip.req.req_uninstall import UninstallPathSet
|
from pip.req.req_uninstall import UninstallPathSet
|
||||||
from pip.vcs import vcs
|
from pip.vcs import vcs
|
||||||
from pip.wheel import move_wheel_files, Wheel, wheel_ext
|
from pip.wheel import move_wheel_files, Wheel, wheel_ext
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class InstallRequirement(object):
|
class InstallRequirement(object):
|
||||||
|
|
||||||
def __init__(self, req, comes_from, source_dir=None, editable=False,
|
def __init__(self, req, comes_from, source_dir=None, editable=False,
|
||||||
|
@ -122,7 +130,7 @@ class InstallRequirement(object):
|
||||||
link = Link(path_to_url(name))
|
link = Link(path_to_url(name))
|
||||||
elif is_archive_file(path):
|
elif is_archive_file(path):
|
||||||
if not os.path.isfile(path):
|
if not os.path.isfile(path):
|
||||||
logger.warn(
|
logger.warning(
|
||||||
'Requirement %r looks like a filename, but the file does '
|
'Requirement %r looks like a filename, but the file does '
|
||||||
'not exist',
|
'not exist',
|
||||||
name
|
name
|
||||||
|
@ -221,15 +229,15 @@ class InstallRequirement(object):
|
||||||
name = self.name
|
name = self.name
|
||||||
new_location = os.path.join(new_build_dir, name)
|
new_location = os.path.join(new_build_dir, name)
|
||||||
if not os.path.exists(new_build_dir):
|
if not os.path.exists(new_build_dir):
|
||||||
logger.debug('Creating directory %s' % new_build_dir)
|
logger.debug('Creating directory %s', new_build_dir)
|
||||||
_make_build_dir(new_build_dir)
|
_make_build_dir(new_build_dir)
|
||||||
if os.path.exists(new_location):
|
if os.path.exists(new_location):
|
||||||
raise InstallationError(
|
raise InstallationError(
|
||||||
'A package already exists in %s; please remove it to continue'
|
'A package already exists in %s; please remove it to continue'
|
||||||
% display_path(new_location))
|
% display_path(new_location))
|
||||||
logger.debug(
|
logger.debug(
|
||||||
'Moving package %s from %s to new location %s' %
|
'Moving package %s from %s to new location %s',
|
||||||
(self, display_path(old_location), display_path(new_location))
|
self, display_path(old_location), display_path(new_location),
|
||||||
)
|
)
|
||||||
shutil.move(old_location, new_location)
|
shutil.move(old_location, new_location)
|
||||||
self._temp_build_dir = new_location
|
self._temp_build_dir = new_location
|
||||||
|
@ -278,18 +286,17 @@ class InstallRequirement(object):
|
||||||
def run_egg_info(self):
|
def run_egg_info(self):
|
||||||
assert self.source_dir
|
assert self.source_dir
|
||||||
if self.name:
|
if self.name:
|
||||||
logger.notify(
|
logger.info(
|
||||||
'Running setup.py (path:%s) egg_info for package %s' %
|
'Running setup.py (path:%s) egg_info for package %s',
|
||||||
(self.setup_py, self.name)
|
self.setup_py, self.name,
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
logger.notify(
|
logger.info(
|
||||||
'Running setup.py (path:%s) egg_info for package from %s' %
|
'Running setup.py (path:%s) egg_info for package from %s',
|
||||||
(self.setup_py, self.url)
|
self.setup_py, self.url,
|
||||||
)
|
)
|
||||||
logger.indent += 2
|
|
||||||
try:
|
|
||||||
|
|
||||||
|
with indent_log():
|
||||||
# if it's distribute>=0.7, it won't contain an importable
|
# if it's distribute>=0.7, it won't contain an importable
|
||||||
# setuptools, and having an egg-info dir blocks the ability of
|
# setuptools, and having an egg-info dir blocks the ability of
|
||||||
# setup.py to find setuptools plugins, so delete the egg-info dir
|
# setup.py to find setuptools plugins, so delete the egg-info dir
|
||||||
|
@ -325,10 +332,9 @@ class InstallRequirement(object):
|
||||||
cwd=cwd,
|
cwd=cwd,
|
||||||
filter_stdout=self._filter_install,
|
filter_stdout=self._filter_install,
|
||||||
show_stdout=False,
|
show_stdout=False,
|
||||||
command_level=logger.VERBOSE_DEBUG,
|
command_level=logging.DEBUG,
|
||||||
command_desc='python setup.py egg_info')
|
command_desc='python setup.py egg_info')
|
||||||
finally:
|
|
||||||
logger.indent -= 2
|
|
||||||
if not self.req:
|
if not self.req:
|
||||||
self.req = pkg_resources.Requirement.parse(
|
self.req = pkg_resources.Requirement.parse(
|
||||||
"%(Name)s==%(Version)s" % self.pkg_info())
|
"%(Name)s==%(Version)s" % self.pkg_info())
|
||||||
|
@ -441,9 +447,9 @@ exec(compile(
|
||||||
p = FeedParser()
|
p = FeedParser()
|
||||||
data = self.egg_info_data('PKG-INFO')
|
data = self.egg_info_data('PKG-INFO')
|
||||||
if not data:
|
if not data:
|
||||||
logger.warn(
|
logger.warning(
|
||||||
'No PKG-INFO file found in %s' %
|
'No PKG-INFO file found in %s',
|
||||||
display_path(self.egg_info_path('PKG-INFO'))
|
display_path(self.egg_info_path('PKG-INFO')),
|
||||||
)
|
)
|
||||||
p.feed(data or '')
|
p.feed(data or '')
|
||||||
return p.close()
|
return p.close()
|
||||||
|
@ -466,7 +472,7 @@ exec(compile(
|
||||||
in_extra = match.group(1)
|
in_extra = match.group(1)
|
||||||
continue
|
continue
|
||||||
if in_extra and in_extra not in extras:
|
if in_extra and in_extra not in extras:
|
||||||
logger.debug('skipping extra %s' % in_extra)
|
logger.debug('skipping extra %s', in_extra)
|
||||||
# Skip requirement for an extra we aren't requiring
|
# Skip requirement for an extra we aren't requiring
|
||||||
continue
|
continue
|
||||||
yield line
|
yield line
|
||||||
|
@ -485,21 +491,25 @@ exec(compile(
|
||||||
assert self.source_dir
|
assert self.source_dir
|
||||||
version = self.installed_version
|
version = self.installed_version
|
||||||
if version not in self.req:
|
if version not in self.req:
|
||||||
logger.warn(
|
logger.warning(
|
||||||
'Requested %s, but installing version %s' %
|
'Requested %s, but installing version %s',
|
||||||
(self, self.installed_version)
|
self,
|
||||||
|
self.installed_version,
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
logger.debug(
|
logger.debug(
|
||||||
'Source in %s has version %s, which satisfies requirement %s' %
|
'Source in %s has version %s, which satisfies requirement %s',
|
||||||
(display_path(self.source_dir), version, self)
|
display_path(self.source_dir),
|
||||||
|
version,
|
||||||
|
self,
|
||||||
)
|
)
|
||||||
|
|
||||||
def update_editable(self, obtain=True):
|
def update_editable(self, obtain=True):
|
||||||
if not self.url:
|
if not self.url:
|
||||||
logger.info(
|
logger.debug(
|
||||||
"Cannot update repository at %s; repository location is "
|
"Cannot update repository at %s; repository location is "
|
||||||
"unknown" % self.source_dir
|
"unknown",
|
||||||
|
self.source_dir,
|
||||||
)
|
)
|
||||||
return
|
return
|
||||||
assert self.editable
|
assert self.editable
|
||||||
|
@ -655,15 +665,17 @@ exec(compile(
|
||||||
if self.uninstalled:
|
if self.uninstalled:
|
||||||
self.uninstalled.rollback()
|
self.uninstalled.rollback()
|
||||||
else:
|
else:
|
||||||
logger.error("Can't rollback %s, nothing uninstalled."
|
logger.error(
|
||||||
% (self.project_name,))
|
"Can't rollback %s, nothing uninstalled.", self.project_name,
|
||||||
|
)
|
||||||
|
|
||||||
def commit_uninstall(self):
|
def commit_uninstall(self):
|
||||||
if self.uninstalled:
|
if self.uninstalled:
|
||||||
self.uninstalled.commit()
|
self.uninstalled.commit()
|
||||||
else:
|
else:
|
||||||
logger.error("Can't commit %s, nothing uninstalled."
|
logger.error(
|
||||||
% (self.project_name,))
|
"Can't commit %s, nothing uninstalled.", self.project_name,
|
||||||
|
)
|
||||||
|
|
||||||
def archive(self, build_dir):
|
def archive(self, build_dir):
|
||||||
assert self.source_dir
|
assert self.source_dir
|
||||||
|
@ -677,13 +689,14 @@ exec(compile(
|
||||||
if response == 'i':
|
if response == 'i':
|
||||||
create_archive = False
|
create_archive = False
|
||||||
elif response == 'w':
|
elif response == 'w':
|
||||||
logger.warn('Deleting %s' % display_path(archive_path))
|
logger.warning('Deleting %s', display_path(archive_path))
|
||||||
os.remove(archive_path)
|
os.remove(archive_path)
|
||||||
elif response == 'b':
|
elif response == 'b':
|
||||||
dest_file = backup_dir(archive_path)
|
dest_file = backup_dir(archive_path)
|
||||||
logger.warn(
|
logger.warning(
|
||||||
'Backing up %s to %s' %
|
'Backing up %s to %s',
|
||||||
(display_path(archive_path), display_path(dest_file))
|
display_path(archive_path),
|
||||||
|
display_path(dest_file),
|
||||||
)
|
)
|
||||||
shutil.move(archive_path, dest_file)
|
shutil.move(archive_path, dest_file)
|
||||||
if create_archive:
|
if create_archive:
|
||||||
|
@ -708,8 +721,7 @@ exec(compile(
|
||||||
name = self._clean_zip_name(filename, dir)
|
name = self._clean_zip_name(filename, dir)
|
||||||
zip.write(filename, self.name + '/' + name)
|
zip.write(filename, self.name + '/' + name)
|
||||||
zip.close()
|
zip.close()
|
||||||
logger.indent -= 2
|
logger.info('Saved %s', display_path(archive_path))
|
||||||
logger.notify('Saved %s' % display_path(archive_path))
|
|
||||||
|
|
||||||
def _clean_zip_name(self, name, prefix):
|
def _clean_zip_name(self, name, prefix):
|
||||||
assert name.startswith(prefix + os.path.sep), (
|
assert name.startswith(prefix + os.path.sep), (
|
||||||
|
@ -763,19 +775,17 @@ exec(compile(
|
||||||
install_args += ['--install-headers',
|
install_args += ['--install-headers',
|
||||||
os.path.join(sys.prefix, 'include', 'site',
|
os.path.join(sys.prefix, 'include', 'site',
|
||||||
py_ver_str)]
|
py_ver_str)]
|
||||||
logger.notify('Running setup.py install for %s' % self.name)
|
logger.info('Running setup.py install for %s', self.name)
|
||||||
logger.indent += 2
|
with indent_log():
|
||||||
try:
|
|
||||||
call_subprocess(
|
call_subprocess(
|
||||||
install_args + install_options,
|
install_args + install_options,
|
||||||
cwd=self.source_dir,
|
cwd=self.source_dir,
|
||||||
filter_stdout=self._filter_install,
|
filter_stdout=self._filter_install,
|
||||||
show_stdout=False,
|
show_stdout=False,
|
||||||
)
|
)
|
||||||
finally:
|
|
||||||
logger.indent -= 2
|
|
||||||
if not os.path.exists(record_filename):
|
if not os.path.exists(record_filename):
|
||||||
logger.notify('Record file %s not found' % record_filename)
|
logger.debug('Record file %s not found', record_filename)
|
||||||
return
|
return
|
||||||
self.install_succeeded = True
|
self.install_succeeded = True
|
||||||
if self.as_egg:
|
if self.as_egg:
|
||||||
|
@ -796,9 +806,10 @@ exec(compile(
|
||||||
egg_info_dir = prepend_root(line)
|
egg_info_dir = prepend_root(line)
|
||||||
break
|
break
|
||||||
else:
|
else:
|
||||||
logger.warn(
|
logger.warning(
|
||||||
'Could not find .egg-info directory in install record for '
|
'Could not find .egg-info directory in install record for '
|
||||||
'%s' % self
|
'%s',
|
||||||
|
self,
|
||||||
)
|
)
|
||||||
f.close()
|
f.close()
|
||||||
# FIXME: put the record somewhere
|
# FIXME: put the record somewhere
|
||||||
|
@ -827,7 +838,7 @@ exec(compile(
|
||||||
"""Remove the source files from this requirement, if they are marked
|
"""Remove the source files from this requirement, if they are marked
|
||||||
for deletion"""
|
for deletion"""
|
||||||
if os.path.exists(self.delete_marker_filename):
|
if os.path.exists(self.delete_marker_filename):
|
||||||
logger.info('Removing source in %s' % self.source_dir)
|
logger.debug('Removing source in %s', self.source_dir)
|
||||||
if self.source_dir:
|
if self.source_dir:
|
||||||
rmtree(self.source_dir)
|
rmtree(self.source_dir)
|
||||||
self.source_dir = None
|
self.source_dir = None
|
||||||
|
@ -836,9 +847,8 @@ exec(compile(
|
||||||
self._temp_build_dir = None
|
self._temp_build_dir = None
|
||||||
|
|
||||||
def install_editable(self, install_options, global_options=()):
|
def install_editable(self, install_options, global_options=()):
|
||||||
logger.notify('Running setup.py develop for %s' % self.name)
|
logger.info('Running setup.py develop for %s', self.name)
|
||||||
logger.indent += 2
|
with indent_log():
|
||||||
try:
|
|
||||||
# FIXME: should we do --install-headers here too?
|
# FIXME: should we do --install-headers here too?
|
||||||
cwd = self.source_dir
|
cwd = self.source_dir
|
||||||
if self.editable_options and \
|
if self.editable_options and \
|
||||||
|
@ -858,12 +868,11 @@ exec(compile(
|
||||||
|
|
||||||
cwd=cwd, filter_stdout=self._filter_install,
|
cwd=cwd, filter_stdout=self._filter_install,
|
||||||
show_stdout=False)
|
show_stdout=False)
|
||||||
finally:
|
|
||||||
logger.indent -= 2
|
|
||||||
self.install_succeeded = True
|
self.install_succeeded = True
|
||||||
|
|
||||||
def _filter_install(self, line):
|
def _filter_install(self, line):
|
||||||
level = logger.NOTIFY
|
level = logging.INFO
|
||||||
for regex in [
|
for regex in [
|
||||||
r'^running .*',
|
r'^running .*',
|
||||||
r'^writing .*',
|
r'^writing .*',
|
||||||
|
@ -877,7 +886,7 @@ exec(compile(
|
||||||
# Not sure what this warning is, but it seems harmless:
|
# Not sure what this warning is, but it seems harmless:
|
||||||
r"^warning: manifest_maker: standard file '-c' not found$"]:
|
r"^warning: manifest_maker: standard file '-c' not found$"]:
|
||||||
if re.search(regex, line.strip()):
|
if re.search(regex, line.strip()):
|
||||||
level = logger.INFO
|
level = logging.DEBUG
|
||||||
break
|
break
|
||||||
return (level, line)
|
return (level, line)
|
||||||
|
|
||||||
|
|
|
@ -1,3 +1,6 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
|
import logging
|
||||||
import os
|
import os
|
||||||
import shutil
|
import shutil
|
||||||
|
|
||||||
|
@ -8,14 +11,17 @@ from pip.exceptions import (InstallationError, BestVersionAlreadyInstalled,
|
||||||
DistributionNotFound, PreviousBuildDirError)
|
DistributionNotFound, PreviousBuildDirError)
|
||||||
from pip.index import Link
|
from pip.index import Link
|
||||||
from pip.locations import (PIP_DELETE_MARKER_FILENAME, build_prefix)
|
from pip.locations import (PIP_DELETE_MARKER_FILENAME, build_prefix)
|
||||||
from pip.log import logger
|
|
||||||
from pip.req.req_install import InstallRequirement
|
from pip.req.req_install import InstallRequirement
|
||||||
from pip.util import (display_path, rmtree, dist_in_usersite, call_subprocess,
|
from pip.utils import (display_path, rmtree, dist_in_usersite, call_subprocess,
|
||||||
_make_build_dir, normalize_path)
|
_make_build_dir, normalize_path)
|
||||||
|
from pip.utils.logging import indent_log
|
||||||
from pip.vcs import vcs
|
from pip.vcs import vcs
|
||||||
from pip.wheel import wheel_ext
|
from pip.wheel import wheel_ext
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class Requirements(object):
|
class Requirements(object):
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
|
@ -130,7 +136,7 @@ class RequirementSet(object):
|
||||||
if os.path.exists(self.download_dir):
|
if os.path.exists(self.download_dir):
|
||||||
return True
|
return True
|
||||||
else:
|
else:
|
||||||
logger.fatal('Could not find download directory')
|
logger.critical('Could not find download directory')
|
||||||
raise InstallationError(
|
raise InstallationError(
|
||||||
"Could not find or access download directory '%s'"
|
"Could not find or access download directory '%s'"
|
||||||
% display_path(self.download_dir))
|
% display_path(self.download_dir))
|
||||||
|
@ -176,9 +182,11 @@ class RequirementSet(object):
|
||||||
else:
|
else:
|
||||||
install_needed = False
|
install_needed = False
|
||||||
if req_to_install.satisfied_by:
|
if req_to_install.satisfied_by:
|
||||||
logger.notify('Requirement already satisfied '
|
logger.info(
|
||||||
'(use --upgrade to upgrade): %s'
|
'Requirement already satisfied (use --upgrade to '
|
||||||
% req_to_install)
|
'upgrade): %s',
|
||||||
|
req_to_install,
|
||||||
|
)
|
||||||
|
|
||||||
if req_to_install.editable:
|
if req_to_install.editable:
|
||||||
if req_to_install.source_dir is None:
|
if req_to_install.source_dir is None:
|
||||||
|
@ -250,30 +258,33 @@ class RequirementSet(object):
|
||||||
install = False
|
install = False
|
||||||
if req_to_install.satisfied_by:
|
if req_to_install.satisfied_by:
|
||||||
if best_installed:
|
if best_installed:
|
||||||
logger.notify('Requirement already up-to-date: %s'
|
logger.info(
|
||||||
% req_to_install)
|
'Requirement already up-to-date: %s',
|
||||||
|
req_to_install,
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
logger.notify('Requirement already satisfied '
|
logger.info(
|
||||||
'(use --upgrade to upgrade): %s'
|
'Requirement already satisfied (use --upgrade to '
|
||||||
% req_to_install)
|
'upgrade): %s',
|
||||||
|
req_to_install,
|
||||||
|
)
|
||||||
if req_to_install.editable:
|
if req_to_install.editable:
|
||||||
logger.notify('Obtaining %s' % req_to_install)
|
logger.info('Obtaining %s', req_to_install)
|
||||||
elif install:
|
elif install:
|
||||||
if (req_to_install.url
|
if (req_to_install.url
|
||||||
and req_to_install.url.lower().startswith('file:')):
|
and req_to_install.url.lower().startswith('file:')):
|
||||||
logger.notify(
|
logger.info(
|
||||||
'Unpacking %s' %
|
'Unpacking %s',
|
||||||
display_path(url_to_path(req_to_install.url))
|
display_path(url_to_path(req_to_install.url)),
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
logger.notify('Downloading/unpacking %s' % req_to_install)
|
logger.info('Downloading/unpacking %s', req_to_install)
|
||||||
logger.indent += 2
|
|
||||||
|
|
||||||
# ################################ #
|
with indent_log():
|
||||||
# # vcs update or unpack archive # #
|
# ################################ #
|
||||||
# ################################ #
|
# # vcs update or unpack archive # #
|
||||||
|
# ################################ #
|
||||||
|
|
||||||
try:
|
|
||||||
is_wheel = False
|
is_wheel = False
|
||||||
if req_to_install.editable:
|
if req_to_install.editable:
|
||||||
if req_to_install.source_dir is None:
|
if req_to_install.source_dir is None:
|
||||||
|
@ -349,9 +360,11 @@ class RequirementSet(object):
|
||||||
do_download, session=self.session,
|
do_download, session=self.session,
|
||||||
)
|
)
|
||||||
except HTTPError as exc:
|
except HTTPError as exc:
|
||||||
logger.fatal(
|
logger.critical(
|
||||||
'Could not install requirement %s because '
|
'Could not install requirement %s because '
|
||||||
'of error %s' % (req_to_install, exc)
|
'of error %s',
|
||||||
|
req_to_install,
|
||||||
|
exc,
|
||||||
)
|
)
|
||||||
raise InstallationError(
|
raise InstallationError(
|
||||||
'Could not install requirement %s because '
|
'Could not install requirement %s because '
|
||||||
|
@ -392,10 +405,10 @@ class RequirementSet(object):
|
||||||
req_to_install.satisfied_by
|
req_to_install.satisfied_by
|
||||||
req_to_install.satisfied_by = None
|
req_to_install.satisfied_by = None
|
||||||
else:
|
else:
|
||||||
logger.notify(
|
logger.info(
|
||||||
'Requirement already satisfied (use '
|
'Requirement already satisfied (use '
|
||||||
'--upgrade to upgrade): %s' %
|
'--upgrade to upgrade): %s',
|
||||||
req_to_install
|
req_to_install,
|
||||||
)
|
)
|
||||||
install = False
|
install = False
|
||||||
|
|
||||||
|
@ -428,9 +441,9 @@ class RequirementSet(object):
|
||||||
req_to_install.dependency_links
|
req_to_install.dependency_links
|
||||||
)
|
)
|
||||||
if (req_to_install.extras):
|
if (req_to_install.extras):
|
||||||
logger.notify(
|
logger.info(
|
||||||
"Installing extra requirements: %r" %
|
"Installing extra requirements: %r",
|
||||||
','.join(req_to_install.extras)
|
','.join(req_to_install.extras),
|
||||||
)
|
)
|
||||||
if not self.ignore_dependencies:
|
if not self.ignore_dependencies:
|
||||||
for req in req_to_install.requirements(
|
for req in req_to_install.requirements(
|
||||||
|
@ -443,8 +456,8 @@ class RequirementSet(object):
|
||||||
# FIXME: proper warning
|
# FIXME: proper warning
|
||||||
logger.error(
|
logger.error(
|
||||||
'Invalid requirement: %r (%s) in '
|
'Invalid requirement: %r (%s) in '
|
||||||
'requirement %s' %
|
'requirement %s',
|
||||||
(req, exc, req_to_install)
|
req, exc, req_to_install,
|
||||||
)
|
)
|
||||||
continue
|
continue
|
||||||
if self.has_requirement(name):
|
if self.has_requirement(name):
|
||||||
|
@ -465,26 +478,21 @@ class RequirementSet(object):
|
||||||
if install:
|
if install:
|
||||||
self.successfully_downloaded.append(req_to_install)
|
self.successfully_downloaded.append(req_to_install)
|
||||||
|
|
||||||
finally:
|
|
||||||
logger.indent -= 2
|
|
||||||
|
|
||||||
def cleanup_files(self):
|
def cleanup_files(self):
|
||||||
"""Clean up files, remove builds."""
|
"""Clean up files, remove builds."""
|
||||||
logger.notify('Cleaning up...')
|
logger.info('Cleaning up...')
|
||||||
logger.indent += 2
|
with indent_log():
|
||||||
for req in self.reqs_to_cleanup:
|
for req in self.reqs_to_cleanup:
|
||||||
req.remove_temporary_source()
|
req.remove_temporary_source()
|
||||||
|
|
||||||
remove_dir = []
|
remove_dir = []
|
||||||
if self._pip_has_created_build_dir():
|
if self._pip_has_created_build_dir():
|
||||||
remove_dir.append(self.build_dir)
|
remove_dir.append(self.build_dir)
|
||||||
|
|
||||||
for dir in remove_dir:
|
for dir in remove_dir:
|
||||||
if os.path.exists(dir):
|
if os.path.exists(dir):
|
||||||
logger.info('Removing temporary dir %s...' % dir)
|
logger.debug('Removing temporary dir %s...', dir)
|
||||||
rmtree(dir)
|
rmtree(dir)
|
||||||
|
|
||||||
logger.indent -= 2
|
|
||||||
|
|
||||||
def _pip_has_created_build_dir(self):
|
def _pip_has_created_build_dir(self):
|
||||||
return (
|
return (
|
||||||
|
@ -496,7 +504,7 @@ class RequirementSet(object):
|
||||||
|
|
||||||
def copy_to_build_dir(self, req_to_install):
|
def copy_to_build_dir(self, req_to_install):
|
||||||
target_dir = req_to_install.editable and self.src_dir or self.build_dir
|
target_dir = req_to_install.editable and self.src_dir or self.build_dir
|
||||||
logger.info("Copying %s to %s" % (req_to_install.name, target_dir))
|
logger.debug("Copying %s to %s", req_to_install.name, target_dir)
|
||||||
dest = os.path.join(target_dir, req_to_install.name)
|
dest = os.path.join(target_dir, req_to_install.name)
|
||||||
shutil.copytree(req_to_install.source_dir, dest)
|
shutil.copytree(req_to_install.source_dir, dest)
|
||||||
call_subprocess(["python", "%s/setup.py" % dest, "clean"], cwd=dest,
|
call_subprocess(["python", "%s/setup.py" % dest, "clean"], cwd=dest,
|
||||||
|
@ -524,12 +532,12 @@ class RequirementSet(object):
|
||||||
to_install.append(req)
|
to_install.append(req)
|
||||||
|
|
||||||
if to_install:
|
if to_install:
|
||||||
logger.notify(
|
logger.info(
|
||||||
'Installing collected packages: %s' %
|
'Installing collected packages: %s',
|
||||||
', '.join([req.name for req in to_install])
|
', '.join([req.name for req in to_install]),
|
||||||
)
|
)
|
||||||
logger.indent += 2
|
|
||||||
try:
|
with indent_log():
|
||||||
for requirement in to_install:
|
for requirement in to_install:
|
||||||
|
|
||||||
# DISTRIBUTE TO SETUPTOOLS UPGRADE HACK (1 of 3 parts)
|
# DISTRIBUTE TO SETUPTOOLS UPGRADE HACK (1 of 3 parts)
|
||||||
|
@ -559,13 +567,12 @@ class RequirementSet(object):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
if requirement.conflicts_with:
|
if requirement.conflicts_with:
|
||||||
logger.notify('Found existing installation: %s'
|
logger.info(
|
||||||
% requirement.conflicts_with)
|
'Found existing installation: %s',
|
||||||
logger.indent += 2
|
requirement.conflicts_with,
|
||||||
try:
|
)
|
||||||
|
with indent_log():
|
||||||
requirement.uninstall(auto_confirm=True)
|
requirement.uninstall(auto_confirm=True)
|
||||||
finally:
|
|
||||||
logger.indent -= 2
|
|
||||||
try:
|
try:
|
||||||
requirement.install(
|
requirement.install(
|
||||||
install_options,
|
install_options,
|
||||||
|
@ -584,6 +591,5 @@ class RequirementSet(object):
|
||||||
and requirement.install_succeeded):
|
and requirement.install_succeeded):
|
||||||
requirement.commit_uninstall()
|
requirement.commit_uninstall()
|
||||||
requirement.remove_temporary_source()
|
requirement.remove_temporary_source()
|
||||||
finally:
|
|
||||||
logger.indent -= 2
|
|
||||||
self.successfully_installed = to_install
|
self.successfully_installed = to_install
|
||||||
|
|
|
@ -1,13 +1,19 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
import imp
|
import imp
|
||||||
|
import logging
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
import tempfile
|
import tempfile
|
||||||
|
|
||||||
from pip.compat import uses_pycache, WINDOWS
|
from pip.compat import uses_pycache, WINDOWS
|
||||||
from pip.exceptions import UninstallationError
|
from pip.exceptions import UninstallationError
|
||||||
from pip.log import logger
|
from pip.utils import (rmtree, ask, is_local, dist_is_local, renames,
|
||||||
from pip.util import (rmtree, ask, is_local, dist_is_local, renames,
|
normalize_path)
|
||||||
normalize_path)
|
from pip.utils.logging import indent_log
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class UninstallPathSet(object):
|
class UninstallPathSet(object):
|
||||||
|
@ -31,13 +37,11 @@ class UninstallPathSet(object):
|
||||||
|
|
||||||
def _can_uninstall(self):
|
def _can_uninstall(self):
|
||||||
if not dist_is_local(self.dist):
|
if not dist_is_local(self.dist):
|
||||||
logger.notify(
|
logger.info(
|
||||||
"Not uninstalling %s at %s, outside environment %s" %
|
"Not uninstalling %s at %s, outside environment %s",
|
||||||
(
|
self.dist.project_name,
|
||||||
self.dist.project_name,
|
normalize_path(self.dist.location),
|
||||||
normalize_path(self.dist.location),
|
sys.prefix,
|
||||||
sys.prefix
|
|
||||||
),
|
|
||||||
)
|
)
|
||||||
return False
|
return False
|
||||||
return True
|
return True
|
||||||
|
@ -89,54 +93,52 @@ class UninstallPathSet(object):
|
||||||
if not self._can_uninstall():
|
if not self._can_uninstall():
|
||||||
return
|
return
|
||||||
if not self.paths:
|
if not self.paths:
|
||||||
logger.notify(
|
logger.info(
|
||||||
"Can't uninstall '%s'. No files were found to uninstall." %
|
"Can't uninstall '%s'. No files were found to uninstall.",
|
||||||
self.dist.project_name
|
self.dist.project_name,
|
||||||
)
|
)
|
||||||
return
|
return
|
||||||
logger.notify('Uninstalling %s:' % self.dist.project_name)
|
logger.info('Uninstalling %s:', self.dist.project_name)
|
||||||
logger.indent += 2
|
|
||||||
paths = sorted(self.compact(self.paths))
|
with indent_log():
|
||||||
try:
|
paths = sorted(self.compact(self.paths))
|
||||||
|
|
||||||
if auto_confirm:
|
if auto_confirm:
|
||||||
response = 'y'
|
response = 'y'
|
||||||
else:
|
else:
|
||||||
for path in paths:
|
for path in paths:
|
||||||
logger.notify(path)
|
logger.info(path)
|
||||||
response = ask('Proceed (y/n)? ', ('y', 'n'))
|
response = ask('Proceed (y/n)? ', ('y', 'n'))
|
||||||
if self._refuse:
|
if self._refuse:
|
||||||
logger.notify('Not removing or modifying (outside of prefix):')
|
logger.info('Not removing or modifying (outside of prefix):')
|
||||||
for path in self.compact(self._refuse):
|
for path in self.compact(self._refuse):
|
||||||
logger.notify(path)
|
logger.info(path)
|
||||||
if response == 'y':
|
if response == 'y':
|
||||||
self.save_dir = tempfile.mkdtemp(suffix='-uninstall',
|
self.save_dir = tempfile.mkdtemp(suffix='-uninstall',
|
||||||
prefix='pip-')
|
prefix='pip-')
|
||||||
for path in paths:
|
for path in paths:
|
||||||
new_path = self._stash(path)
|
new_path = self._stash(path)
|
||||||
logger.info('Removing file or directory %s' % path)
|
logger.debug('Removing file or directory %s', path)
|
||||||
self._moved_paths.append(path)
|
self._moved_paths.append(path)
|
||||||
renames(path, new_path)
|
renames(path, new_path)
|
||||||
for pth in self.pth.values():
|
for pth in self.pth.values():
|
||||||
pth.remove()
|
pth.remove()
|
||||||
logger.notify(
|
logger.info(
|
||||||
'Successfully uninstalled %s' % self.dist.project_name
|
'Successfully uninstalled %s', self.dist.project_name
|
||||||
)
|
)
|
||||||
|
|
||||||
finally:
|
|
||||||
logger.indent -= 2
|
|
||||||
|
|
||||||
def rollback(self):
|
def rollback(self):
|
||||||
"""Rollback the changes previously made by remove()."""
|
"""Rollback the changes previously made by remove()."""
|
||||||
if self.save_dir is None:
|
if self.save_dir is None:
|
||||||
logger.error(
|
logger.error(
|
||||||
"Can't roll back %s; was not uninstalled" %
|
"Can't roll back %s; was not uninstalled",
|
||||||
self.dist.project_name
|
self.dist.project_name,
|
||||||
)
|
)
|
||||||
return False
|
return False
|
||||||
logger.notify('Rolling back uninstall of %s' % self.dist.project_name)
|
logger.info('Rolling back uninstall of %s', self.dist.project_name)
|
||||||
for path in self._moved_paths:
|
for path in self._moved_paths:
|
||||||
tmp_path = self._stash(path)
|
tmp_path = self._stash(path)
|
||||||
logger.info('Replacing %s' % path)
|
logger.debug('Replacing %s', path)
|
||||||
renames(tmp_path, path)
|
renames(tmp_path, path)
|
||||||
for pth in self.pth.values():
|
for pth in self.pth.values():
|
||||||
pth.rollback()
|
pth.rollback()
|
||||||
|
@ -170,7 +172,7 @@ class UninstallPthEntries(object):
|
||||||
self.entries.add(entry)
|
self.entries.add(entry)
|
||||||
|
|
||||||
def remove(self):
|
def remove(self):
|
||||||
logger.info('Removing pth entries from %s:' % self.file)
|
logger.debug('Removing pth entries from %s:', self.file)
|
||||||
fh = open(self.file, 'rb')
|
fh = open(self.file, 'rb')
|
||||||
# windows uses '\r\n' with py3k, but uses '\n' with py2.x
|
# windows uses '\r\n' with py3k, but uses '\n' with py2.x
|
||||||
lines = fh.readlines()
|
lines = fh.readlines()
|
||||||
|
@ -182,7 +184,7 @@ class UninstallPthEntries(object):
|
||||||
endline = '\n'
|
endline = '\n'
|
||||||
for entry in self.entries:
|
for entry in self.entries:
|
||||||
try:
|
try:
|
||||||
logger.info('Removing entry: %s' % entry)
|
logger.debug('Removing entry: %s', entry)
|
||||||
lines.remove((entry + endline).encode("utf-8"))
|
lines.remove((entry + endline).encode("utf-8"))
|
||||||
except ValueError:
|
except ValueError:
|
||||||
pass
|
pass
|
||||||
|
@ -193,10 +195,10 @@ class UninstallPthEntries(object):
|
||||||
def rollback(self):
|
def rollback(self):
|
||||||
if self._saved_lines is None:
|
if self._saved_lines is None:
|
||||||
logger.error(
|
logger.error(
|
||||||
'Cannot roll back changes to %s, none were made' % self.file
|
'Cannot roll back changes to %s, none were made', self.file
|
||||||
)
|
)
|
||||||
return False
|
return False
|
||||||
logger.info('Rolling %s back to previous state' % self.file)
|
logger.debug('Rolling %s back to previous state', self.file)
|
||||||
fh = open(self.file, 'wb')
|
fh = open(self.file, 'wb')
|
||||||
fh.writelines(self._saved_lines)
|
fh.writelines(self._saved_lines)
|
||||||
fh.close()
|
fh.close()
|
||||||
|
|
|
@ -1,3 +1,5 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
SUCCESS = 0
|
SUCCESS = 0
|
||||||
ERROR = 1
|
ERROR = 1
|
||||||
UNKNOWN_ERROR = 2
|
UNKNOWN_ERROR = 2
|
||||||
|
|
|
@ -1,5 +1,8 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
import contextlib
|
import contextlib
|
||||||
import locale
|
import locale
|
||||||
|
import logging
|
||||||
import re
|
import re
|
||||||
import os
|
import os
|
||||||
import posixpath
|
import posixpath
|
||||||
|
@ -16,7 +19,6 @@ from pip.locations import (
|
||||||
site_packages, user_site, running_under_virtualenv, virtualenv_no_global,
|
site_packages, user_site, running_under_virtualenv, virtualenv_no_global,
|
||||||
write_delete_marker_file
|
write_delete_marker_file
|
||||||
)
|
)
|
||||||
from pip.log import logger
|
|
||||||
from pip._vendor import pkg_resources, six
|
from pip._vendor import pkg_resources, six
|
||||||
from pip._vendor.distlib import version
|
from pip._vendor.distlib import version
|
||||||
from pip._vendor.six.moves import input
|
from pip._vendor.six.moves import input
|
||||||
|
@ -40,6 +42,9 @@ __all__ = ['rmtree', 'display_path', 'backup_dir',
|
||||||
'captured_stdout', 'remove_tracebacks']
|
'captured_stdout', 'remove_tracebacks']
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
def get_prog():
|
def get_prog():
|
||||||
try:
|
try:
|
||||||
if os.path.basename(sys.argv[0]) in ('__main__.py', '-c'):
|
if os.path.basename(sys.argv[0]) in ('__main__.py', '-c'):
|
||||||
|
@ -562,7 +567,9 @@ def untar_file(filename, location):
|
||||||
elif filename.lower().endswith('.tar'):
|
elif filename.lower().endswith('.tar'):
|
||||||
mode = 'r'
|
mode = 'r'
|
||||||
else:
|
else:
|
||||||
logger.warn('Cannot determine compression type for file %s' % filename)
|
logger.warning(
|
||||||
|
'Cannot determine compression type for file %s', filename,
|
||||||
|
)
|
||||||
mode = 'r:*'
|
mode = 'r:*'
|
||||||
tar = tarfile.open(filename, mode)
|
tar = tarfile.open(filename, mode)
|
||||||
try:
|
try:
|
||||||
|
@ -587,9 +594,10 @@ def untar_file(filename, location):
|
||||||
except Exception as exc:
|
except Exception as exc:
|
||||||
# Some corrupt tar files seem to produce this
|
# Some corrupt tar files seem to produce this
|
||||||
# (specifically bad symlinks)
|
# (specifically bad symlinks)
|
||||||
logger.warn(
|
logger.warning(
|
||||||
'In the tar file %s the member %s is invalid: %s'
|
'In the tar file %s the member %s is invalid: %s',
|
||||||
% (filename, member.name, exc))
|
filename, member.name, exc,
|
||||||
|
)
|
||||||
continue
|
continue
|
||||||
else:
|
else:
|
||||||
try:
|
try:
|
||||||
|
@ -597,9 +605,10 @@ def untar_file(filename, location):
|
||||||
except (KeyError, AttributeError) as exc:
|
except (KeyError, AttributeError) as exc:
|
||||||
# Some corrupt tar files seem to produce this
|
# Some corrupt tar files seem to produce this
|
||||||
# (specifically bad symlinks)
|
# (specifically bad symlinks)
|
||||||
logger.warn(
|
logger.warning(
|
||||||
'In the tar file %s the member %s is invalid: %s'
|
'In the tar file %s the member %s is invalid: %s',
|
||||||
% (filename, member.name, exc))
|
filename, member.name, exc,
|
||||||
|
)
|
||||||
continue
|
continue
|
||||||
if not os.path.exists(os.path.dirname(path)):
|
if not os.path.exists(os.path.dirname(path)):
|
||||||
os.makedirs(os.path.dirname(path))
|
os.makedirs(os.path.dirname(path))
|
||||||
|
@ -642,10 +651,10 @@ def unpack_file(filename, location, content_type, link):
|
||||||
else:
|
else:
|
||||||
# FIXME: handle?
|
# FIXME: handle?
|
||||||
# FIXME: magic signatures?
|
# FIXME: magic signatures?
|
||||||
logger.fatal(
|
logger.critical(
|
||||||
'Cannot unpack file %s (downloaded from %s, content-type: %s); '
|
'Cannot unpack file %s (downloaded from %s, content-type: %s); '
|
||||||
'cannot detect archive format' %
|
'cannot detect archive format',
|
||||||
(filename, location, content_type)
|
filename, location, content_type,
|
||||||
)
|
)
|
||||||
raise InstallationError(
|
raise InstallationError(
|
||||||
'Cannot determine archive format of %s' % location
|
'Cannot determine archive format of %s' % location
|
||||||
|
@ -666,7 +675,7 @@ def remove_tracebacks(output):
|
||||||
def call_subprocess(cmd, show_stdout=True,
|
def call_subprocess(cmd, show_stdout=True,
|
||||||
filter_stdout=None, cwd=None,
|
filter_stdout=None, cwd=None,
|
||||||
raise_on_returncode=True,
|
raise_on_returncode=True,
|
||||||
command_level=logger.DEBUG, command_desc=None,
|
command_level=logging.DEBUG, command_desc=None,
|
||||||
extra_environ=None):
|
extra_environ=None):
|
||||||
if command_desc is None:
|
if command_desc is None:
|
||||||
cmd_parts = []
|
cmd_parts = []
|
||||||
|
@ -679,7 +688,7 @@ def call_subprocess(cmd, show_stdout=True,
|
||||||
stdout = None
|
stdout = None
|
||||||
else:
|
else:
|
||||||
stdout = subprocess.PIPE
|
stdout = subprocess.PIPE
|
||||||
logger.log(command_level, "Running command %s" % command_desc)
|
logger.log(command_level, "Running command %s", command_desc)
|
||||||
env = os.environ.copy()
|
env = os.environ.copy()
|
||||||
if extra_environ:
|
if extra_environ:
|
||||||
env.update(extra_environ)
|
env.update(extra_environ)
|
||||||
|
@ -688,8 +697,9 @@ def call_subprocess(cmd, show_stdout=True,
|
||||||
cmd, stderr=subprocess.STDOUT, stdin=None, stdout=stdout,
|
cmd, stderr=subprocess.STDOUT, stdin=None, stdout=stdout,
|
||||||
cwd=cwd, env=env)
|
cwd=cwd, env=env)
|
||||||
except Exception as exc:
|
except Exception as exc:
|
||||||
logger.fatal(
|
logger.critical(
|
||||||
"Error %s while executing command %s" % (exc, command_desc))
|
"Error %s while executing command %s", exc, command_desc,
|
||||||
|
)
|
||||||
raise
|
raise
|
||||||
all_output = []
|
all_output = []
|
||||||
if stdout is not None:
|
if stdout is not None:
|
||||||
|
@ -706,10 +716,11 @@ def call_subprocess(cmd, show_stdout=True,
|
||||||
if isinstance(level, tuple):
|
if isinstance(level, tuple):
|
||||||
level, line = level
|
level, line = level
|
||||||
logger.log(level, line)
|
logger.log(level, line)
|
||||||
if not logger.stdout_level_matches(level):
|
# if not logger.stdout_level_matches(level) and False:
|
||||||
logger.show_progress()
|
# # TODO(dstufft): Handle progress bar.
|
||||||
|
# logger.show_progress()
|
||||||
else:
|
else:
|
||||||
logger.info(line)
|
logger.debug(line)
|
||||||
else:
|
else:
|
||||||
returned_stdout, returned_stderr = proc.communicate()
|
returned_stdout, returned_stderr = proc.communicate()
|
||||||
all_output = [returned_stdout or '']
|
all_output = [returned_stdout or '']
|
||||||
|
@ -717,10 +728,10 @@ def call_subprocess(cmd, show_stdout=True,
|
||||||
if proc.returncode:
|
if proc.returncode:
|
||||||
if raise_on_returncode:
|
if raise_on_returncode:
|
||||||
if all_output:
|
if all_output:
|
||||||
logger.notify(
|
logger.info(
|
||||||
'Complete output from command %s:' % command_desc
|
'Complete output from command %s:', command_desc,
|
||||||
)
|
)
|
||||||
logger.notify(
|
logger.info(
|
||||||
'\n'.join(all_output) +
|
'\n'.join(all_output) +
|
||||||
'\n----------------------------------------'
|
'\n----------------------------------------'
|
||||||
)
|
)
|
||||||
|
@ -728,9 +739,10 @@ def call_subprocess(cmd, show_stdout=True,
|
||||||
'Command "%s" failed with error code %s in %s'
|
'Command "%s" failed with error code %s in %s'
|
||||||
% (command_desc, proc.returncode, cwd))
|
% (command_desc, proc.returncode, cwd))
|
||||||
else:
|
else:
|
||||||
logger.warn(
|
logger.warning(
|
||||||
'Command "%s" had error code %s in %s'
|
'Command "%s" had error code %s in %s',
|
||||||
% (command_desc, proc.returncode, cwd))
|
command_desc, proc.returncode, cwd,
|
||||||
|
)
|
||||||
if stdout is not None:
|
if stdout is not None:
|
||||||
return remove_tracebacks(''.join(all_output))
|
return remove_tracebacks(''.join(all_output))
|
||||||
|
|
68
pip/utils/deprecation.py
Normal file
68
pip/utils/deprecation.py
Normal file
|
@ -0,0 +1,68 @@
|
||||||
|
"""
|
||||||
|
A module that implments tooling to enable easy warnings about deprecations.
|
||||||
|
"""
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import warnings
|
||||||
|
|
||||||
|
|
||||||
|
class PipDeprecationWarning(Warning):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class RemovedInPip17Warning(PipDeprecationWarning, DeprecationWarning):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class RemovedInPip18Warning(PipDeprecationWarning, PendingDeprecationWarning):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
DEPRECATIONS = [RemovedInPip17Warning, RemovedInPip18Warning]
|
||||||
|
|
||||||
|
|
||||||
|
# Warnings <-> Logging Integration
|
||||||
|
|
||||||
|
|
||||||
|
_warnings_showwarning = None
|
||||||
|
|
||||||
|
|
||||||
|
def _showwarning(message, category, filename, lineno, file=None, line=None):
|
||||||
|
if file is not None:
|
||||||
|
if _warnings_showwarning is not None:
|
||||||
|
_warnings_showwarning(
|
||||||
|
message, category, filename, lineno, file, line,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
if issubclass(category, PipDeprecationWarning):
|
||||||
|
# We use a specially named logger which will handle all of the
|
||||||
|
# deprecation messages for pip.
|
||||||
|
logger = logging.getLogger("pip.deprecations")
|
||||||
|
|
||||||
|
# This is purposely using the % formatter here instead of letting
|
||||||
|
# the logging module handle the interpolation. This is because we
|
||||||
|
# want it to appear as if someone typed this entire message out.
|
||||||
|
log_message = "DEPRECATION: %s" % message
|
||||||
|
|
||||||
|
# Things that are DeprecationWarnings will be removed in the very
|
||||||
|
# next version of pip. We want these to be more obvious so we
|
||||||
|
# use the ERROR logging level while the PendingDeprecationWarnings
|
||||||
|
# are still have at least 2 versions to go until they are removed
|
||||||
|
# so they can just be warnings.
|
||||||
|
if issubclass(category, DeprecationWarning):
|
||||||
|
logger.error(log_message)
|
||||||
|
else:
|
||||||
|
logger.warning(log_message)
|
||||||
|
else:
|
||||||
|
_warnings_showwarning(
|
||||||
|
message, category, filename, lineno, file, line,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def install_warning_logger():
|
||||||
|
global _warnings_showwarning
|
||||||
|
|
||||||
|
if _warnings_showwarning is None:
|
||||||
|
_warnings_showwarning = warnings.showwarning
|
||||||
|
warnings.showwarning = _showwarning
|
121
pip/utils/logging.py
Normal file
121
pip/utils/logging.py
Normal file
|
@ -0,0 +1,121 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
|
import contextlib
|
||||||
|
import logging
|
||||||
|
import logging.handlers
|
||||||
|
import os
|
||||||
|
|
||||||
|
try:
|
||||||
|
import threading
|
||||||
|
except ImportError:
|
||||||
|
import dummy_threading as threading
|
||||||
|
|
||||||
|
from pip.compat import WINDOWS
|
||||||
|
|
||||||
|
try:
|
||||||
|
from pip._vendor import colorama
|
||||||
|
# Lots of different errors can come from this, including SystemError and
|
||||||
|
# ImportError.
|
||||||
|
except Exception:
|
||||||
|
colorama = None
|
||||||
|
|
||||||
|
|
||||||
|
_log_state = threading.local()
|
||||||
|
_log_state.indentation = 0
|
||||||
|
|
||||||
|
|
||||||
|
@contextlib.contextmanager
|
||||||
|
def indent_log(num=2):
|
||||||
|
"""
|
||||||
|
A context manager which will cause the log output to be indented for any
|
||||||
|
log messages emited inside it.
|
||||||
|
"""
|
||||||
|
_log_state.indentation += num
|
||||||
|
yield
|
||||||
|
_log_state.indentation -= num
|
||||||
|
|
||||||
|
|
||||||
|
def get_indentation():
|
||||||
|
return _log_state.indentation
|
||||||
|
|
||||||
|
|
||||||
|
class IndentingFormatter(logging.Formatter):
|
||||||
|
|
||||||
|
def format(self, record):
|
||||||
|
"""
|
||||||
|
Calls the standard formatter, but will indent all of the log messages
|
||||||
|
by our current indentation level.
|
||||||
|
"""
|
||||||
|
formatted = logging.Formatter.format(self, record)
|
||||||
|
formatted = "".join([
|
||||||
|
(" " * get_indentation()) + line
|
||||||
|
for line in formatted.splitlines(True)
|
||||||
|
])
|
||||||
|
return formatted
|
||||||
|
|
||||||
|
|
||||||
|
def _color_wrap(*colors):
|
||||||
|
def wrapped(inp):
|
||||||
|
return "".join(list(colors) + [inp, colorama.Style.RESET_ALL])
|
||||||
|
return wrapped
|
||||||
|
|
||||||
|
|
||||||
|
class ColorizedStreamHandler(logging.StreamHandler):
|
||||||
|
|
||||||
|
# Don't build up a list of colors if we don't have colorama
|
||||||
|
if colorama:
|
||||||
|
COLORS = [
|
||||||
|
# This needs to be in order from highest logging level to lowest.
|
||||||
|
(logging.ERROR, _color_wrap(colorama.Fore.RED)),
|
||||||
|
(logging.WARNING, _color_wrap(colorama.Fore.YELLOW)),
|
||||||
|
]
|
||||||
|
else:
|
||||||
|
COLORS = []
|
||||||
|
|
||||||
|
def __init__(self, stream=None):
|
||||||
|
logging.StreamHandler.__init__(self, stream)
|
||||||
|
|
||||||
|
if WINDOWS and colorama:
|
||||||
|
self.stream = colorama.AnsiToWin32(self.stream)
|
||||||
|
|
||||||
|
def should_color(self):
|
||||||
|
# Don't colorize things if we do not have colorama
|
||||||
|
if not colorama:
|
||||||
|
return False
|
||||||
|
|
||||||
|
real_stream = (
|
||||||
|
self.stream if not isinstance(self.stream, colorama.AnsiToWin32)
|
||||||
|
else self.stream.wrapped
|
||||||
|
)
|
||||||
|
|
||||||
|
# If the stream is a tty we should color it
|
||||||
|
if hasattr(real_stream, "isatty") and real_stream.isatty():
|
||||||
|
return True
|
||||||
|
|
||||||
|
# If we have an ASNI term we should color it
|
||||||
|
if os.environ.get("TERM") == "ANSI":
|
||||||
|
return True
|
||||||
|
|
||||||
|
# If anything else we should not color it
|
||||||
|
return False
|
||||||
|
|
||||||
|
def format(self, record):
|
||||||
|
msg = logging.StreamHandler.format(self, record)
|
||||||
|
|
||||||
|
if self.should_color():
|
||||||
|
for level, color in self.COLORS:
|
||||||
|
if record.levelno >= level:
|
||||||
|
msg = color(msg)
|
||||||
|
break
|
||||||
|
|
||||||
|
return msg
|
||||||
|
|
||||||
|
|
||||||
|
class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler):
|
||||||
|
|
||||||
|
def _open(self):
|
||||||
|
# Ensure the directory exists
|
||||||
|
if not os.path.exists(os.path.dirname(self.baseFilename)):
|
||||||
|
os.makedirs(os.path.dirname(self.baseFilename))
|
||||||
|
|
||||||
|
return logging.handlers.RotatingFileHandler._open(self)
|
70
pip/utils/ui.py
Normal file
70
pip/utils/ui.py
Normal file
|
@ -0,0 +1,70 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
|
from __future__ import division
|
||||||
|
|
||||||
|
import itertools
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from pip.utils import format_size
|
||||||
|
from pip.utils.logging import get_indentation
|
||||||
|
from pip._vendor.progress.bar import Bar
|
||||||
|
from pip._vendor.progress.helpers import WritelnMixin
|
||||||
|
from pip._vendor.progress.spinner import Spinner
|
||||||
|
|
||||||
|
|
||||||
|
class DownloadProgressMixin(object):
|
||||||
|
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
|
super(DownloadProgressMixin, self).__init__(*args, **kwargs)
|
||||||
|
self.message = (" " * (get_indentation() + 2)) + self.message
|
||||||
|
|
||||||
|
@property
|
||||||
|
def downloaded(self):
|
||||||
|
return format_size(self.index)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def download_speed(self):
|
||||||
|
return format_size(1 / self.avg) + "/s"
|
||||||
|
|
||||||
|
@property
|
||||||
|
def pretty_eta(self):
|
||||||
|
if self.eta:
|
||||||
|
return "eta %s" % self.eta_td
|
||||||
|
return ""
|
||||||
|
|
||||||
|
def iter(self, it, n=1):
|
||||||
|
for x in it:
|
||||||
|
yield x
|
||||||
|
self.next(n)
|
||||||
|
self.finish()
|
||||||
|
|
||||||
|
|
||||||
|
class DownloadProgressBar(DownloadProgressMixin, Bar):
|
||||||
|
|
||||||
|
file = sys.stdout
|
||||||
|
message = "%(percent)d%%"
|
||||||
|
suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s"
|
||||||
|
|
||||||
|
|
||||||
|
class DownloadProgressSpinner(DownloadProgressMixin, WritelnMixin, Spinner):
|
||||||
|
|
||||||
|
file = sys.stdout
|
||||||
|
suffix = "%(downloaded)s %(download_speed)s"
|
||||||
|
|
||||||
|
def next_phase(self):
|
||||||
|
if not hasattr(self, "_phaser"):
|
||||||
|
self._phaser = itertools.cycle(self.phases)
|
||||||
|
return next(self._phaser)
|
||||||
|
|
||||||
|
def update(self):
|
||||||
|
message = self.message % self
|
||||||
|
phase = self.next_phase()
|
||||||
|
suffix = self.suffix % self
|
||||||
|
line = ''.join([
|
||||||
|
message,
|
||||||
|
" " if message else "",
|
||||||
|
phase,
|
||||||
|
" " if suffix else "",
|
||||||
|
suffix,
|
||||||
|
])
|
||||||
|
|
||||||
|
self.writeln(line)
|
|
@ -1,18 +1,22 @@
|
||||||
"""Handles all VCS (version control) support"""
|
"""Handles all VCS (version control) support"""
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
|
import logging
|
||||||
import os
|
import os
|
||||||
import shutil
|
import shutil
|
||||||
|
|
||||||
from pip.compat import urlparse, urllib
|
from pip.compat import urlparse, urllib
|
||||||
from pip.exceptions import BadCommand
|
from pip.exceptions import BadCommand
|
||||||
from pip.log import logger
|
from pip.utils import (display_path, backup_dir, find_command,
|
||||||
from pip.util import (display_path, backup_dir, find_command,
|
rmtree, ask_path_exists)
|
||||||
rmtree, ask_path_exists)
|
|
||||||
|
|
||||||
|
|
||||||
__all__ = ['vcs', 'get_src_requirement']
|
__all__ = ['vcs', 'get_src_requirement']
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class VcsSupport(object):
|
class VcsSupport(object):
|
||||||
_registry = {}
|
_registry = {}
|
||||||
schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn']
|
schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn']
|
||||||
|
@ -46,7 +50,7 @@ class VcsSupport(object):
|
||||||
|
|
||||||
def register(self, cls):
|
def register(self, cls):
|
||||||
if not hasattr(cls, 'name'):
|
if not hasattr(cls, 'name'):
|
||||||
logger.warn('Cannot register VCS %s' % cls.__name__)
|
logger.warning('Cannot register VCS %s', cls.__name__)
|
||||||
return
|
return
|
||||||
if cls.name not in self._registry:
|
if cls.name not in self._registry:
|
||||||
self._registry[cls.name] = cls
|
self._registry[cls.name] = cls
|
||||||
|
@ -57,7 +61,7 @@ class VcsSupport(object):
|
||||||
elif cls in self._registry.values():
|
elif cls in self._registry.values():
|
||||||
del self._registry[cls.name]
|
del self._registry[cls.name]
|
||||||
else:
|
else:
|
||||||
logger.warn('Cannot unregister because no class or name given')
|
logger.warning('Cannot unregister because no class or name given')
|
||||||
|
|
||||||
def get_backend_name(self, location):
|
def get_backend_name(self, location):
|
||||||
"""
|
"""
|
||||||
|
@ -95,7 +99,7 @@ class VersionControl(object):
|
||||||
super(VersionControl, self).__init__(*args, **kwargs)
|
super(VersionControl, self).__init__(*args, **kwargs)
|
||||||
|
|
||||||
def _filter(self, line):
|
def _filter(self, line):
|
||||||
return (logger.INFO, line)
|
return (logging.DEBUG, line)
|
||||||
|
|
||||||
def _is_local_repository(self, repo):
|
def _is_local_repository(self, repo):
|
||||||
"""
|
"""
|
||||||
|
@ -110,7 +114,7 @@ class VersionControl(object):
|
||||||
if self._cmd is not None:
|
if self._cmd is not None:
|
||||||
return self._cmd
|
return self._cmd
|
||||||
command = find_command(self.name)
|
command = find_command(self.name)
|
||||||
logger.info('Found command %r at %r' % (self.name, command))
|
logger.debug('Found command %r at %r', self.name, command)
|
||||||
self._cmd = command
|
self._cmd = command
|
||||||
return command
|
return command
|
||||||
|
|
||||||
|
@ -187,46 +191,67 @@ class VersionControl(object):
|
||||||
if os.path.exists(os.path.join(dest, self.dirname)):
|
if os.path.exists(os.path.join(dest, self.dirname)):
|
||||||
existing_url = self.get_url(dest)
|
existing_url = self.get_url(dest)
|
||||||
if self.compare_urls(existing_url, url):
|
if self.compare_urls(existing_url, url):
|
||||||
logger.info('%s in %s exists, and has correct URL (%s)' %
|
logger.debug(
|
||||||
(self.repo_name.title(), display_path(dest),
|
'%s in %s exists, and has correct URL (%s)',
|
||||||
url))
|
self.repo_name.title(),
|
||||||
logger.notify('Updating %s %s%s' %
|
display_path(dest),
|
||||||
(display_path(dest), self.repo_name,
|
url,
|
||||||
rev_display))
|
)
|
||||||
|
logger.info(
|
||||||
|
'Updating %s %s%s',
|
||||||
|
display_path(dest),
|
||||||
|
self.repo_name,
|
||||||
|
rev_display,
|
||||||
|
)
|
||||||
self.update(dest, rev_options)
|
self.update(dest, rev_options)
|
||||||
else:
|
else:
|
||||||
logger.warn('%s %s in %s exists with URL %s' %
|
logger.warning(
|
||||||
(self.name, self.repo_name,
|
'%s %s in %s exists with URL %s',
|
||||||
display_path(dest), existing_url))
|
self.name,
|
||||||
|
self.repo_name,
|
||||||
|
display_path(dest),
|
||||||
|
existing_url,
|
||||||
|
)
|
||||||
prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ',
|
prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ',
|
||||||
('s', 'i', 'w', 'b'))
|
('s', 'i', 'w', 'b'))
|
||||||
else:
|
else:
|
||||||
logger.warn('Directory %s already exists, '
|
logger.warning(
|
||||||
'and is not a %s %s.' %
|
'Directory %s already exists, and is not a %s %s.',
|
||||||
(dest, self.name, self.repo_name))
|
dest,
|
||||||
|
self.name,
|
||||||
|
self.repo_name,
|
||||||
|
)
|
||||||
prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b'))
|
prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b'))
|
||||||
if prompt:
|
if prompt:
|
||||||
logger.warn('The plan is to install the %s repository %s' %
|
logger.warning(
|
||||||
(self.name, url))
|
'The plan is to install the %s repository %s',
|
||||||
|
self.name,
|
||||||
|
url,
|
||||||
|
)
|
||||||
response = ask_path_exists('What to do? %s' % prompt[0],
|
response = ask_path_exists('What to do? %s' % prompt[0],
|
||||||
prompt[1])
|
prompt[1])
|
||||||
|
|
||||||
if response == 's':
|
if response == 's':
|
||||||
logger.notify('Switching %s %s to %s%s' %
|
logger.info(
|
||||||
(self.repo_name, display_path(dest), url,
|
'Switching %s %s to %s%s',
|
||||||
rev_display))
|
self.repo_name,
|
||||||
|
display_path(dest),
|
||||||
|
url,
|
||||||
|
rev_display,
|
||||||
|
)
|
||||||
self.switch(dest, url, rev_options)
|
self.switch(dest, url, rev_options)
|
||||||
elif response == 'i':
|
elif response == 'i':
|
||||||
# do nothing
|
# do nothing
|
||||||
pass
|
pass
|
||||||
elif response == 'w':
|
elif response == 'w':
|
||||||
logger.warn('Deleting %s' % display_path(dest))
|
logger.warning('Deleting %s', display_path(dest))
|
||||||
rmtree(dest)
|
rmtree(dest)
|
||||||
checkout = True
|
checkout = True
|
||||||
elif response == 'b':
|
elif response == 'b':
|
||||||
dest_dir = backup_dir(dest)
|
dest_dir = backup_dir(dest)
|
||||||
logger.warn('Backing up %s to %s'
|
logger.warning(
|
||||||
% (display_path(dest), dest_dir))
|
'Backing up %s to %s', display_path(dest), dest_dir,
|
||||||
|
)
|
||||||
shutil.move(dest, dest_dir)
|
shutil.move(dest, dest_dir)
|
||||||
checkout = True
|
checkout = True
|
||||||
return checkout
|
return checkout
|
||||||
|
@ -248,12 +273,16 @@ def get_src_requirement(dist, location, find_tags):
|
||||||
location,
|
location,
|
||||||
find_tags)
|
find_tags)
|
||||||
except BadCommand:
|
except BadCommand:
|
||||||
logger.warn('cannot determine version of editable source in %s '
|
logger.warning(
|
||||||
'(%s command not found in path)' % (
|
'cannot determine version of editable source in %s '
|
||||||
location, version_control.name))
|
'(%s command not found in path)',
|
||||||
|
location,
|
||||||
|
version_control.name,
|
||||||
|
)
|
||||||
return dist.as_requirement()
|
return dist.as_requirement()
|
||||||
logger.warn(
|
logger.warning(
|
||||||
'cannot determine version of editable source in %s (is not SVN '
|
'cannot determine version of editable source in %s (is not SVN '
|
||||||
'checkout, Git clone, Mercurial clone or Bazaar branch)' % location
|
'checkout, Git clone, Mercurial clone or Bazaar branch)',
|
||||||
|
location,
|
||||||
)
|
)
|
||||||
return dist.as_requirement()
|
return dist.as_requirement()
|
||||||
|
|
|
@ -1,13 +1,19 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
|
import logging
|
||||||
import os
|
import os
|
||||||
import tempfile
|
import tempfile
|
||||||
import re
|
import re
|
||||||
|
|
||||||
from pip.compat import urlparse
|
from pip.compat import urlparse
|
||||||
from pip.log import logger
|
from pip.utils import rmtree, display_path, call_subprocess
|
||||||
from pip.util import rmtree, display_path, call_subprocess
|
|
||||||
from pip.vcs import vcs, VersionControl
|
from pip.vcs import vcs, VersionControl
|
||||||
from pip.download import path_to_url
|
from pip.download import path_to_url
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class Bazaar(VersionControl):
|
class Bazaar(VersionControl):
|
||||||
name = 'bzr'
|
name = 'bzr'
|
||||||
dirname = '.bzr'
|
dirname = '.bzr'
|
||||||
|
@ -56,8 +62,12 @@ class Bazaar(VersionControl):
|
||||||
rev_options = []
|
rev_options = []
|
||||||
rev_display = ''
|
rev_display = ''
|
||||||
if self.check_destination(dest, url, rev_options, rev_display):
|
if self.check_destination(dest, url, rev_options, rev_display):
|
||||||
logger.notify('Checking out %s%s to %s'
|
logger.info(
|
||||||
% (url, rev_display, display_path(dest)))
|
'Checking out %s%s to %s',
|
||||||
|
url,
|
||||||
|
rev_display,
|
||||||
|
display_path(dest),
|
||||||
|
)
|
||||||
call_subprocess(
|
call_subprocess(
|
||||||
[self.cmd, 'branch', '-q'] + rev_options + [url, dest])
|
[self.cmd, 'branch', '-q'] + rev_options + [url, dest])
|
||||||
|
|
||||||
|
|
|
@ -1,16 +1,21 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
|
import logging
|
||||||
import tempfile
|
import tempfile
|
||||||
import os.path
|
import os.path
|
||||||
|
|
||||||
from pip.util import call_subprocess
|
from pip.utils import call_subprocess
|
||||||
from pip.util import display_path, rmtree
|
from pip.utils import display_path, rmtree
|
||||||
from pip.vcs import vcs, VersionControl
|
from pip.vcs import vcs, VersionControl
|
||||||
from pip.log import logger
|
|
||||||
from pip.compat import url2pathname, urlparse
|
from pip.compat import url2pathname, urlparse
|
||||||
|
|
||||||
urlsplit = urlparse.urlsplit
|
urlsplit = urlparse.urlsplit
|
||||||
urlunsplit = urlparse.urlunsplit
|
urlunsplit = urlparse.urlunsplit
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class Git(VersionControl):
|
class Git(VersionControl):
|
||||||
name = 'git'
|
name = 'git'
|
||||||
dirname = '.git'
|
dirname = '.git'
|
||||||
|
@ -67,8 +72,8 @@ class Git(VersionControl):
|
||||||
# a local tag or branch name
|
# a local tag or branch name
|
||||||
return [revisions[rev]]
|
return [revisions[rev]]
|
||||||
else:
|
else:
|
||||||
logger.warn(
|
logger.warning(
|
||||||
"Could not find a tag or branch '%s', assuming commit." % rev,
|
"Could not find a tag or branch '%s', assuming commit.", rev,
|
||||||
)
|
)
|
||||||
return rev_options
|
return rev_options
|
||||||
|
|
||||||
|
@ -104,8 +109,8 @@ class Git(VersionControl):
|
||||||
rev_options = ['origin/master']
|
rev_options = ['origin/master']
|
||||||
rev_display = ''
|
rev_display = ''
|
||||||
if self.check_destination(dest, url, rev_options, rev_display):
|
if self.check_destination(dest, url, rev_options, rev_display):
|
||||||
logger.notify(
|
logger.info(
|
||||||
'Cloning %s%s to %s' % (url, rev_display, display_path(dest)),
|
'Cloning %s%s to %s', url, rev_display, display_path(dest),
|
||||||
)
|
)
|
||||||
call_subprocess([self.cmd, 'clone', '-q', url, dest])
|
call_subprocess([self.cmd, 'clone', '-q', url, dest])
|
||||||
#: repo may contain submodules
|
#: repo may contain submodules
|
||||||
|
|
|
@ -1,15 +1,20 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
|
import logging
|
||||||
import os
|
import os
|
||||||
import tempfile
|
import tempfile
|
||||||
import re
|
import re
|
||||||
|
|
||||||
from pip.util import call_subprocess
|
from pip.utils import call_subprocess
|
||||||
from pip.util import display_path, rmtree
|
from pip.utils import display_path, rmtree
|
||||||
from pip.log import logger
|
|
||||||
from pip.vcs import vcs, VersionControl
|
from pip.vcs import vcs, VersionControl
|
||||||
from pip.download import path_to_url
|
from pip.download import path_to_url
|
||||||
from pip._vendor.six.moves import configparser
|
from pip._vendor.six.moves import configparser
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class Mercurial(VersionControl):
|
class Mercurial(VersionControl):
|
||||||
name = 'hg'
|
name = 'hg'
|
||||||
dirname = '.hg'
|
dirname = '.hg'
|
||||||
|
@ -37,9 +42,9 @@ class Mercurial(VersionControl):
|
||||||
config.write(config_file)
|
config.write(config_file)
|
||||||
config_file.close()
|
config_file.close()
|
||||||
except (OSError, configparser.NoSectionError) as exc:
|
except (OSError, configparser.NoSectionError) as exc:
|
||||||
logger.warn(
|
logger.warning(
|
||||||
'Could not switch Mercurial repository to %s: %s'
|
'Could not switch Mercurial repository to %s: %s', url, exc,
|
||||||
% (url, exc))
|
)
|
||||||
else:
|
else:
|
||||||
call_subprocess([self.cmd, 'update', '-q'] + rev_options, cwd=dest)
|
call_subprocess([self.cmd, 'update', '-q'] + rev_options, cwd=dest)
|
||||||
|
|
||||||
|
@ -57,8 +62,12 @@ class Mercurial(VersionControl):
|
||||||
rev_options = []
|
rev_options = []
|
||||||
rev_display = ''
|
rev_display = ''
|
||||||
if self.check_destination(dest, url, rev_options, rev_display):
|
if self.check_destination(dest, url, rev_options, rev_display):
|
||||||
logger.notify('Cloning hg %s%s to %s'
|
logger.info(
|
||||||
% (url, rev_display, display_path(dest)))
|
'Cloning hg %s%s to %s',
|
||||||
|
url,
|
||||||
|
rev_display,
|
||||||
|
display_path(dest),
|
||||||
|
)
|
||||||
call_subprocess([self.cmd, 'clone', '--noupdate', '-q', url, dest])
|
call_subprocess([self.cmd, 'clone', '--noupdate', '-q', url, dest])
|
||||||
call_subprocess([self.cmd, 'update', '-q'] + rev_options, cwd=dest)
|
call_subprocess([self.cmd, 'update', '-q'] + rev_options, cwd=dest)
|
||||||
|
|
||||||
|
|
|
@ -1,10 +1,13 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
|
import logging
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
|
|
||||||
from pip.compat import urlparse
|
from pip.compat import urlparse
|
||||||
from pip.index import Link
|
from pip.index import Link
|
||||||
from pip.util import rmtree, display_path, call_subprocess
|
from pip.utils import rmtree, display_path, call_subprocess
|
||||||
from pip.log import logger
|
from pip.utils.logging import indent_log
|
||||||
from pip.vcs import vcs, VersionControl
|
from pip.vcs import vcs, VersionControl
|
||||||
|
|
||||||
_svn_xml_url_re = re.compile('url="([^"]+)"')
|
_svn_xml_url_re = re.compile('url="([^"]+)"')
|
||||||
|
@ -15,6 +18,9 @@ _svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"')
|
||||||
_svn_info_xml_url_re = re.compile(r'<url>(.*)</url>')
|
_svn_info_xml_url_re = re.compile(r'<url>(.*)</url>')
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class Subversion(VersionControl):
|
class Subversion(VersionControl):
|
||||||
name = 'svn'
|
name = 'svn'
|
||||||
dirname = '.svn'
|
dirname = '.svn'
|
||||||
|
@ -32,20 +38,20 @@ class Subversion(VersionControl):
|
||||||
)
|
)
|
||||||
match = _svn_url_re.search(output)
|
match = _svn_url_re.search(output)
|
||||||
if not match:
|
if not match:
|
||||||
logger.warn(
|
logger.warning(
|
||||||
'Cannot determine URL of svn checkout %s' %
|
'Cannot determine URL of svn checkout %s',
|
||||||
display_path(location)
|
display_path(location),
|
||||||
)
|
)
|
||||||
logger.info('Output that cannot be parsed: \n%s' % output)
|
logger.debug('Output that cannot be parsed: \n%s', output)
|
||||||
return None, None
|
return None, None
|
||||||
url = match.group(1).strip()
|
url = match.group(1).strip()
|
||||||
match = _svn_revision_re.search(output)
|
match = _svn_revision_re.search(output)
|
||||||
if not match:
|
if not match:
|
||||||
logger.warn(
|
logger.warning(
|
||||||
'Cannot determine revision of svn checkout %s' %
|
'Cannot determine revision of svn checkout %s',
|
||||||
display_path(location)
|
display_path(location),
|
||||||
)
|
)
|
||||||
logger.info('Output that cannot be parsed: \n%s' % output)
|
logger.debug('Output that cannot be parsed: \n%s', output)
|
||||||
return url, None
|
return url, None
|
||||||
return url, match.group(1)
|
return url, match.group(1)
|
||||||
|
|
||||||
|
@ -53,9 +59,8 @@ class Subversion(VersionControl):
|
||||||
"""Export the svn repository at the url to the destination location"""
|
"""Export the svn repository at the url to the destination location"""
|
||||||
url, rev = self.get_url_rev()
|
url, rev = self.get_url_rev()
|
||||||
rev_options = get_rev_options(url, rev)
|
rev_options = get_rev_options(url, rev)
|
||||||
logger.notify('Exporting svn repository %s to %s' % (url, location))
|
logger.info('Exporting svn repository %s to %s', url, location)
|
||||||
logger.indent += 2
|
with indent_log():
|
||||||
try:
|
|
||||||
if os.path.exists(location):
|
if os.path.exists(location):
|
||||||
# Subversion doesn't like to check out over an existing
|
# Subversion doesn't like to check out over an existing
|
||||||
# directory --force fixes this, but was only added in svn 1.5
|
# directory --force fixes this, but was only added in svn 1.5
|
||||||
|
@ -63,8 +68,6 @@ class Subversion(VersionControl):
|
||||||
call_subprocess(
|
call_subprocess(
|
||||||
[self.cmd, 'export'] + rev_options + [url, location],
|
[self.cmd, 'export'] + rev_options + [url, location],
|
||||||
filter_stdout=self._filter, show_stdout=False)
|
filter_stdout=self._filter, show_stdout=False)
|
||||||
finally:
|
|
||||||
logger.indent -= 2
|
|
||||||
|
|
||||||
def switch(self, dest, url, rev_options):
|
def switch(self, dest, url, rev_options):
|
||||||
call_subprocess(
|
call_subprocess(
|
||||||
|
@ -82,8 +85,12 @@ class Subversion(VersionControl):
|
||||||
else:
|
else:
|
||||||
rev_display = ''
|
rev_display = ''
|
||||||
if self.check_destination(dest, url, rev_options, rev_display):
|
if self.check_destination(dest, url, rev_options, rev_display):
|
||||||
logger.notify('Checking out %s%s to %s'
|
logger.info(
|
||||||
% (url, rev_display, display_path(dest)))
|
'Checking out %s%s to %s',
|
||||||
|
url,
|
||||||
|
rev_display,
|
||||||
|
display_path(dest),
|
||||||
|
)
|
||||||
call_subprocess(
|
call_subprocess(
|
||||||
[self.cmd, 'checkout', '-q'] + rev_options + [url, dest])
|
[self.cmd, 'checkout', '-q'] + rev_options + [url, dest])
|
||||||
|
|
||||||
|
@ -146,10 +153,10 @@ class Subversion(VersionControl):
|
||||||
if location == last_location:
|
if location == last_location:
|
||||||
# We've traversed up to the root of the filesystem without
|
# We've traversed up to the root of the filesystem without
|
||||||
# finding setup.py
|
# finding setup.py
|
||||||
logger.warn(
|
logger.warning(
|
||||||
"Could not find setup.py for directory %s (tried all "
|
"Could not find setup.py for directory %s (tried all "
|
||||||
"parent directories)" %
|
"parent directories)",
|
||||||
orig_location
|
orig_location,
|
||||||
)
|
)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
@ -240,17 +247,18 @@ class Subversion(VersionControl):
|
||||||
tag_revs = self.get_tag_revs(tag_url)
|
tag_revs = self.get_tag_revs(tag_url)
|
||||||
match = self.find_tag_match(rev, tag_revs)
|
match = self.find_tag_match(rev, tag_revs)
|
||||||
if match:
|
if match:
|
||||||
logger.notify(
|
logger.info(
|
||||||
'trunk checkout %s seems to be equivalent to tag %s' %
|
'trunk checkout %s seems to be equivalent to tag %s',
|
||||||
match
|
match,
|
||||||
)
|
)
|
||||||
repo = '%s/%s' % (tag_url, match)
|
repo = '%s/%s' % (tag_url, match)
|
||||||
full_egg_name = '%s-%s' % (egg_project_name, match)
|
full_egg_name = '%s-%s' % (egg_project_name, match)
|
||||||
else:
|
else:
|
||||||
# Don't know what it is
|
# Don't know what it is
|
||||||
logger.warn(
|
logger.warning(
|
||||||
'svn URL does not fit normal structure (tags/branches/trunk): '
|
'svn URL does not fit normal structure (tags/branches/trunk): '
|
||||||
'%s' % repo
|
'%s',
|
||||||
|
repo,
|
||||||
)
|
)
|
||||||
full_egg_name = '%s-dev_r%s' % (egg_project_name, rev)
|
full_egg_name = '%s-dev_r%s' % (egg_project_name, rev)
|
||||||
return 'svn+%s@%s#egg=%s' % (repo, rev, full_egg_name)
|
return 'svn+%s@%s#egg=%s' % (repo, rev, full_egg_name)
|
||||||
|
|
69
pip/wheel.py
69
pip/wheel.py
|
@ -1,12 +1,13 @@
|
||||||
"""
|
"""
|
||||||
Support for installing and building the "wheel" binary package format.
|
Support for installing and building the "wheel" binary package format.
|
||||||
"""
|
"""
|
||||||
from __future__ import with_statement
|
from __future__ import absolute_import
|
||||||
|
|
||||||
import compileall
|
import compileall
|
||||||
import csv
|
import csv
|
||||||
import functools
|
import functools
|
||||||
import hashlib
|
import hashlib
|
||||||
|
import logging
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import shutil
|
import shutil
|
||||||
|
@ -18,10 +19,10 @@ from email.parser import Parser
|
||||||
from pip.compat import StringIO
|
from pip.compat import StringIO
|
||||||
from pip.exceptions import InvalidWheelFilename, UnsupportedWheel
|
from pip.exceptions import InvalidWheelFilename, UnsupportedWheel
|
||||||
from pip.locations import distutils_scheme
|
from pip.locations import distutils_scheme
|
||||||
from pip.log import logger
|
|
||||||
from pip import pep425tags
|
from pip import pep425tags
|
||||||
from pip.util import (call_subprocess, normalize_path, make_path_relative,
|
from pip.utils import (call_subprocess, normalize_path, make_path_relative,
|
||||||
captured_stdout, remove_tracebacks)
|
captured_stdout, remove_tracebacks)
|
||||||
|
from pip.utils.logging import indent_log
|
||||||
from pip._vendor.distlib.scripts import ScriptMaker
|
from pip._vendor.distlib.scripts import ScriptMaker
|
||||||
from pip._vendor import pkg_resources
|
from pip._vendor import pkg_resources
|
||||||
from pip._vendor.six.moves import configparser
|
from pip._vendor.six.moves import configparser
|
||||||
|
@ -33,6 +34,9 @@ wheel_ext = '.whl'
|
||||||
VERSION_COMPATIBLE = (1, 0)
|
VERSION_COMPATIBLE = (1, 0)
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
def rehash(path, algo='sha256', blocksize=1 << 20):
|
def rehash(path, algo='sha256', blocksize=1 << 20):
|
||||||
"""Return (hash, length) for path using hashlib.new(algo)"""
|
"""Return (hash, length) for path using hashlib.new(algo)"""
|
||||||
h = hashlib.new(algo)
|
h = hashlib.new(algo)
|
||||||
|
@ -411,7 +415,7 @@ def uninstallation_paths(dist):
|
||||||
|
|
||||||
UninstallPathSet.add() takes care of the __pycache__ .pyc.
|
UninstallPathSet.add() takes care of the __pycache__ .pyc.
|
||||||
"""
|
"""
|
||||||
from pip.util import FakeFile # circular import
|
from pip.utils import FakeFile # circular import
|
||||||
r = csv.reader(FakeFile(dist.get_metadata_lines('RECORD')))
|
r = csv.reader(FakeFile(dist.get_metadata_lines('RECORD')))
|
||||||
for row in r:
|
for row in r:
|
||||||
path = os.path.join(dist.location, row[0])
|
path = os.path.join(dist.location, row[0])
|
||||||
|
@ -465,8 +469,10 @@ def check_compatibility(version, name):
|
||||||
"of pip" % (name, '.'.join(map(str, version)))
|
"of pip" % (name, '.'.join(map(str, version)))
|
||||||
)
|
)
|
||||||
elif version > VERSION_COMPATIBLE:
|
elif version > VERSION_COMPATIBLE:
|
||||||
logger.warn('Installing from a newer Wheel-Version (%s)'
|
logger.warning(
|
||||||
% '.'.join(map(str, version)))
|
'Installing from a newer Wheel-Version (%s)',
|
||||||
|
'.'.join(map(str, version)),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class Wheel(object):
|
class Wheel(object):
|
||||||
|
@ -545,15 +551,15 @@ class WheelBuilder(object):
|
||||||
"__file__, 'exec'))" % req.setup_py
|
"__file__, 'exec'))" % req.setup_py
|
||||||
] + list(self.global_options)
|
] + list(self.global_options)
|
||||||
|
|
||||||
logger.notify('Running setup.py bdist_wheel for %s' % req.name)
|
logger.info('Running setup.py bdist_wheel for %s', req.name)
|
||||||
logger.notify('Destination directory: %s' % self.wheel_dir)
|
logger.info('Destination directory: %s', self.wheel_dir)
|
||||||
wheel_args = base_args + ['bdist_wheel', '-d', self.wheel_dir] \
|
wheel_args = base_args + ['bdist_wheel', '-d', self.wheel_dir] \
|
||||||
+ self.build_options
|
+ self.build_options
|
||||||
try:
|
try:
|
||||||
call_subprocess(wheel_args, cwd=req.source_dir, show_stdout=False)
|
call_subprocess(wheel_args, cwd=req.source_dir, show_stdout=False)
|
||||||
return True
|
return True
|
||||||
except:
|
except:
|
||||||
logger.error('Failed building wheel for %s' % req.name)
|
logger.error('Failed building wheel for %s', req.name)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def build(self):
|
def build(self):
|
||||||
|
@ -567,11 +573,13 @@ class WheelBuilder(object):
|
||||||
buildset = []
|
buildset = []
|
||||||
for req in reqset:
|
for req in reqset:
|
||||||
if req.is_wheel:
|
if req.is_wheel:
|
||||||
logger.notify(
|
logger.info(
|
||||||
'Skipping %s, due to already being wheel.' % req.name)
|
'Skipping %s, due to already being wheel.', req.name,
|
||||||
|
)
|
||||||
elif req.editable:
|
elif req.editable:
|
||||||
logger.notify(
|
logger.info(
|
||||||
'Skipping %s, due to being editable' % req.name)
|
'Skipping %s, due to being editable', req.name,
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
buildset.append(req)
|
buildset.append(req)
|
||||||
|
|
||||||
|
@ -579,29 +587,28 @@ class WheelBuilder(object):
|
||||||
return True
|
return True
|
||||||
|
|
||||||
# Build the wheels.
|
# Build the wheels.
|
||||||
logger.notify(
|
logger.info(
|
||||||
'Building wheels for collected packages: %s' %
|
'Building wheels for collected packages: %s',
|
||||||
', '.join([req.name for req in buildset])
|
', '.join([req.name for req in buildset]),
|
||||||
)
|
)
|
||||||
logger.indent += 2
|
with indent_log():
|
||||||
build_success, build_failure = [], []
|
build_success, build_failure = [], []
|
||||||
for req in buildset:
|
for req in buildset:
|
||||||
if self._build_one(req):
|
if self._build_one(req):
|
||||||
build_success.append(req)
|
build_success.append(req)
|
||||||
else:
|
else:
|
||||||
build_failure.append(req)
|
build_failure.append(req)
|
||||||
logger.indent -= 2
|
|
||||||
|
|
||||||
# notify success/failure
|
# notify success/failure
|
||||||
if build_success:
|
if build_success:
|
||||||
logger.notify(
|
logger.info(
|
||||||
'Successfully built %s' %
|
'Successfully built %s',
|
||||||
' '.join([req.name for req in build_success])
|
' '.join([req.name for req in build_success]),
|
||||||
)
|
)
|
||||||
if build_failure:
|
if build_failure:
|
||||||
logger.notify(
|
logger.info(
|
||||||
'Failed to build %s' %
|
'Failed to build %s',
|
||||||
' '.join([req.name for req in build_failure])
|
' '.join([req.name for req in build_failure]),
|
||||||
)
|
)
|
||||||
# Return True if all builds were successful
|
# Return True if all builds were successful
|
||||||
return len(build_failure) == 0
|
return len(build_failure) == 0
|
||||||
|
|
|
@ -6,7 +6,7 @@ from os.path import join, curdir, pardir
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from pip.util import rmtree
|
from pip.utils import rmtree
|
||||||
from tests.lib import pyversion
|
from tests.lib import pyversion
|
||||||
from tests.lib.local_repos import local_checkout
|
from tests.lib.local_repos import local_checkout
|
||||||
from tests.lib.path import Path
|
from tests.lib.path import Path
|
||||||
|
@ -62,8 +62,8 @@ def test_editable_install(script):
|
||||||
"INITools==0.2 should either be a path to a local project or a VCS url"
|
"INITools==0.2 should either be a path to a local project or a VCS url"
|
||||||
in result.stdout
|
in result.stdout
|
||||||
)
|
)
|
||||||
assert len(result.files_created) == 1, result.files_created
|
assert not result.files_created
|
||||||
assert not result.files_updated, result.files_updated
|
assert not result.files_updated
|
||||||
|
|
||||||
|
|
||||||
def test_install_editable_from_svn(script, tmpdir):
|
def test_install_editable_from_svn(script, tmpdir):
|
||||||
|
@ -248,17 +248,13 @@ def test_vcs_url_final_slash_normalization(script, tmpdir):
|
||||||
"""
|
"""
|
||||||
Test that presence or absence of final slash in VCS URL is normalized.
|
Test that presence or absence of final slash in VCS URL is normalized.
|
||||||
"""
|
"""
|
||||||
result = script.pip(
|
script.pip(
|
||||||
'install', '-e',
|
'install', '-e',
|
||||||
'%s/#egg=ScriptTest' %
|
'%s/#egg=ScriptTest' %
|
||||||
local_checkout(
|
local_checkout(
|
||||||
'hg+https://bitbucket.org/ianb/scripttest',
|
'hg+https://bitbucket.org/ianb/scripttest',
|
||||||
tmpdir.join("cache"),
|
tmpdir.join("cache"),
|
||||||
),
|
),
|
||||||
expect_error=True,
|
|
||||||
)
|
|
||||||
assert 'pip-log.txt' not in result.files_created, (
|
|
||||||
result.files_created['pip-log.txt'].bytes
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@ -283,7 +279,7 @@ def test_vcs_url_urlquote_normalization(script, tmpdir):
|
||||||
"""
|
"""
|
||||||
Test that urlquoted characters are normalized for repo URL comparison.
|
Test that urlquoted characters are normalized for repo URL comparison.
|
||||||
"""
|
"""
|
||||||
result = script.pip(
|
script.pip(
|
||||||
'install', '-e',
|
'install', '-e',
|
||||||
'%s/#egg=django-wikiapp' %
|
'%s/#egg=django-wikiapp' %
|
||||||
local_checkout(
|
local_checkout(
|
||||||
|
@ -291,10 +287,6 @@ def test_vcs_url_urlquote_normalization(script, tmpdir):
|
||||||
'/release-0.1',
|
'/release-0.1',
|
||||||
tmpdir.join("cache"),
|
tmpdir.join("cache"),
|
||||||
),
|
),
|
||||||
expect_error=True,
|
|
||||||
)
|
|
||||||
assert 'pip-log.txt' not in result.files_created, (
|
|
||||||
result.files_created['pip-log.txt'].bytes
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@ -332,8 +324,7 @@ def test_install_from_local_directory_with_no_setup_py(script, data):
|
||||||
Test installing from a local directory with no 'setup.py'.
|
Test installing from a local directory with no 'setup.py'.
|
||||||
"""
|
"""
|
||||||
result = script.pip('install', data.root, expect_error=True)
|
result = script.pip('install', data.root, expect_error=True)
|
||||||
assert len(result.files_created) == 1, result.files_created
|
assert not result.files_created
|
||||||
assert 'pip-log.txt' in result.files_created, result.files_created
|
|
||||||
assert "is not installable. File 'setup.py' not found." in result.stdout
|
assert "is not installable. File 'setup.py' not found." in result.stdout
|
||||||
|
|
||||||
|
|
||||||
|
@ -342,8 +333,7 @@ def test_editable_install_from_local_directory_with_no_setup_py(script, data):
|
||||||
Test installing from a local directory with no 'setup.py'.
|
Test installing from a local directory with no 'setup.py'.
|
||||||
"""
|
"""
|
||||||
result = script.pip('install', '-e', data.root, expect_error=True)
|
result = script.pip('install', '-e', data.root, expect_error=True)
|
||||||
assert len(result.files_created) == 1, result.files_created
|
assert not result.files_created
|
||||||
assert 'pip-log.txt' in result.files_created, result.files_created
|
|
||||||
assert "is not installable. File 'setup.py' not found." in result.stdout
|
assert "is not installable. File 'setup.py' not found." in result.stdout
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -167,19 +167,6 @@ def _test_config_file_override_stack(script, virtualenv, config_file):
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def test_log_file_no_directory():
|
|
||||||
"""
|
|
||||||
Test opening a log file with no directory name.
|
|
||||||
|
|
||||||
"""
|
|
||||||
from pip.basecommand import open_logfile
|
|
||||||
fp = open_logfile('testpip.log')
|
|
||||||
fp.write('can write')
|
|
||||||
fp.close()
|
|
||||||
assert os.path.exists(fp.name)
|
|
||||||
os.remove(fp.name)
|
|
||||||
|
|
||||||
|
|
||||||
def test_options_from_venv_config(script, virtualenv):
|
def test_options_from_venv_config(script, virtualenv):
|
||||||
"""
|
"""
|
||||||
Test if ConfigOptionParser reads a virtualenv-local config file
|
Test if ConfigOptionParser reads a virtualenv-local config file
|
||||||
|
|
|
@ -197,7 +197,7 @@ def test_uninstall_rollback(script, data):
|
||||||
assert_all_changes(
|
assert_all_changes(
|
||||||
result.files_after,
|
result.files_after,
|
||||||
result2,
|
result2,
|
||||||
[script.venv / 'build', 'pip-log.txt'],
|
[script.venv / 'build'],
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -9,7 +9,7 @@ from mock import patch
|
||||||
from tests.lib import assert_all_changes, pyversion
|
from tests.lib import assert_all_changes, pyversion
|
||||||
from tests.lib.local_repos import local_repo, local_checkout
|
from tests.lib.local_repos import local_repo, local_checkout
|
||||||
|
|
||||||
from pip.util import rmtree
|
from pip.utils import rmtree
|
||||||
|
|
||||||
|
|
||||||
def test_simple_uninstall(script):
|
def test_simple_uninstall(script):
|
||||||
|
@ -299,8 +299,7 @@ def test_uninstall_as_egg(script, data):
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@patch('pip.req.req_uninstall.logger')
|
def test_uninstallpathset_no_paths(caplog):
|
||||||
def test_uninstallpathset_no_paths(mock_logger):
|
|
||||||
"""
|
"""
|
||||||
Test UninstallPathSet logs notification when there are no paths to
|
Test UninstallPathSet logs notification when there are no paths to
|
||||||
uninstall
|
uninstall
|
||||||
|
@ -313,13 +312,14 @@ def test_uninstallpathset_no_paths(mock_logger):
|
||||||
mock_dist_is_local.return_value = True
|
mock_dist_is_local.return_value = True
|
||||||
uninstall_set = UninstallPathSet(test_dist)
|
uninstall_set = UninstallPathSet(test_dist)
|
||||||
uninstall_set.remove() # with no files added to set
|
uninstall_set.remove() # with no files added to set
|
||||||
mock_logger.notify.assert_any_call(
|
|
||||||
"Can't uninstall 'pip'. No files were found to uninstall.",
|
assert (
|
||||||
|
"Can't uninstall 'pip'. No files were found to uninstall."
|
||||||
|
in caplog.text()
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@patch('pip.req.req_uninstall.logger')
|
def test_uninstallpathset_non_local(caplog):
|
||||||
def test_uninstallpathset_non_local(mock_logger):
|
|
||||||
"""
|
"""
|
||||||
Test UninstallPathSet logs notification and returns (with no exception)
|
Test UninstallPathSet logs notification and returns (with no exception)
|
||||||
when dist is non-local
|
when dist is non-local
|
||||||
|
@ -338,11 +338,12 @@ def test_uninstallpathset_non_local(mock_logger):
|
||||||
# with no files added to set; which is the case when trying to remove
|
# with no files added to set; which is the case when trying to remove
|
||||||
# non-local dists
|
# non-local dists
|
||||||
uninstall_set.remove()
|
uninstall_set.remove()
|
||||||
mock_logger.notify.assert_any_call(
|
|
||||||
"Not uninstalling pip at %s, outside environment %s" %
|
assert (
|
||||||
(nonlocal_path, sys.prefix)
|
"Not uninstalling pip at %s, outside environment %s"
|
||||||
|
% (nonlocal_path, sys.prefix)
|
||||||
|
in caplog.text()
|
||||||
)
|
)
|
||||||
mock_logger.notify.mock_calls
|
|
||||||
|
|
||||||
|
|
||||||
def test_uninstall_wheel(script, data):
|
def test_uninstall_wheel(script, data):
|
||||||
|
|
|
@ -277,7 +277,6 @@ class PipTestEnvironment(scripttest.TestFileEnvironment):
|
||||||
if environ is None:
|
if environ is None:
|
||||||
environ = os.environ.copy()
|
environ = os.environ.copy()
|
||||||
|
|
||||||
environ["PIP_LOG_FILE"] = base_path.join("pip-log.txt")
|
|
||||||
environ["PATH"] = Path.pathsep.join(
|
environ["PATH"] = Path.pathsep.join(
|
||||||
[self.bin_path] + [environ.get("PATH", [])],
|
[self.bin_path] + [environ.get("PATH", [])],
|
||||||
)
|
)
|
||||||
|
|
|
@ -5,7 +5,7 @@ import subprocess
|
||||||
from os.path import dirname, abspath
|
from os.path import dirname, abspath
|
||||||
|
|
||||||
from pip.compat import urllib
|
from pip.compat import urllib
|
||||||
from pip.util import rmtree
|
from pip.utils import rmtree
|
||||||
|
|
||||||
|
|
||||||
src_folder = dirname(dirname(abspath(__file__)))
|
src_folder = dirname(dirname(abspath(__file__)))
|
||||||
|
|
|
@ -90,3 +90,102 @@ class TestUserCacheDir:
|
||||||
'/etc/xdg/pip',
|
'/etc/xdg/pip',
|
||||||
'/etc'
|
'/etc'
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class TestUserDataDir:
|
||||||
|
|
||||||
|
def test_user_data_dir_win_no_roaming(self, monkeypatch):
|
||||||
|
@pretend.call_recorder
|
||||||
|
def _get_win_folder(base):
|
||||||
|
return "C:\\Users\\test\\AppData\\Local"
|
||||||
|
|
||||||
|
monkeypatch.setattr(
|
||||||
|
appdirs,
|
||||||
|
"_get_win_folder",
|
||||||
|
_get_win_folder,
|
||||||
|
raising=False,
|
||||||
|
)
|
||||||
|
monkeypatch.setattr(appdirs, "WINDOWS", True)
|
||||||
|
|
||||||
|
assert (appdirs.user_data_dir("pip").replace("/", "\\")
|
||||||
|
== "C:\\Users\\test\\AppData\\Local\\pip")
|
||||||
|
assert _get_win_folder.calls == [pretend.call("CSIDL_LOCAL_APPDATA")]
|
||||||
|
|
||||||
|
def test_user_data_dir_win_yes_roaming(self, monkeypatch):
|
||||||
|
@pretend.call_recorder
|
||||||
|
def _get_win_folder(base):
|
||||||
|
return "C:\\Users\\test\\AppData\\Roaming"
|
||||||
|
|
||||||
|
monkeypatch.setattr(
|
||||||
|
appdirs,
|
||||||
|
"_get_win_folder",
|
||||||
|
_get_win_folder,
|
||||||
|
raising=False,
|
||||||
|
)
|
||||||
|
monkeypatch.setattr(appdirs, "WINDOWS", True)
|
||||||
|
|
||||||
|
assert (appdirs.user_data_dir("pip", roaming=True).replace("/", "\\")
|
||||||
|
== "C:\\Users\\test\\AppData\\Roaming\\pip")
|
||||||
|
assert _get_win_folder.calls == [pretend.call("CSIDL_APPDATA")]
|
||||||
|
|
||||||
|
def test_user_data_dir_osx(self, monkeypatch):
|
||||||
|
monkeypatch.setenv("HOME", "/home/test")
|
||||||
|
monkeypatch.setattr(sys, "platform", "darwin")
|
||||||
|
|
||||||
|
assert (appdirs.user_data_dir("pip")
|
||||||
|
== "/home/test/Library/Application Support/pip")
|
||||||
|
|
||||||
|
def test_user_data_dir_linux(self, monkeypatch):
|
||||||
|
monkeypatch.delenv("XDG_DATA_HOME")
|
||||||
|
monkeypatch.setenv("HOME", "/home/test")
|
||||||
|
monkeypatch.setattr(sys, "platform", "linux2")
|
||||||
|
|
||||||
|
assert appdirs.user_data_dir("pip") == "/home/test/.local/share/pip"
|
||||||
|
|
||||||
|
def test_user_data_dir_linux_override(self, monkeypatch):
|
||||||
|
monkeypatch.setenv("XDG_DATA_HOME", "/home/test/.other-share")
|
||||||
|
monkeypatch.setenv("HOME", "/home/test")
|
||||||
|
monkeypatch.setattr(sys, "platform", "linux2")
|
||||||
|
|
||||||
|
assert appdirs.user_data_dir("pip") == "/home/test/.other-share/pip"
|
||||||
|
|
||||||
|
|
||||||
|
class TestUserLogDir:
|
||||||
|
|
||||||
|
def test_user_log_dir_win(self, monkeypatch):
|
||||||
|
@pretend.call_recorder
|
||||||
|
def _get_win_folder(base):
|
||||||
|
return "C:\\Users\\test\\AppData\\Local"
|
||||||
|
|
||||||
|
monkeypatch.setattr(
|
||||||
|
appdirs,
|
||||||
|
"_get_win_folder",
|
||||||
|
_get_win_folder,
|
||||||
|
raising=False,
|
||||||
|
)
|
||||||
|
monkeypatch.setattr(appdirs, "WINDOWS", True)
|
||||||
|
|
||||||
|
assert (appdirs.user_log_dir("pip").replace("/", "\\")
|
||||||
|
== "C:\\Users\\test\\AppData\\Local\\pip\\Logs")
|
||||||
|
assert _get_win_folder.calls == [pretend.call("CSIDL_LOCAL_APPDATA")]
|
||||||
|
|
||||||
|
def test_user_log_dir_osx(self, monkeypatch):
|
||||||
|
monkeypatch.setenv("HOME", "/home/test")
|
||||||
|
monkeypatch.setattr(sys, "platform", "darwin")
|
||||||
|
|
||||||
|
assert (appdirs.user_log_dir("pip")
|
||||||
|
== "/home/test/Library/Logs/pip")
|
||||||
|
|
||||||
|
def test_uuser_log_dir_linux(self, monkeypatch):
|
||||||
|
monkeypatch.delenv("XDG_CACHE_HOME")
|
||||||
|
monkeypatch.setenv("HOME", "/home/test")
|
||||||
|
monkeypatch.setattr(sys, "platform", "linux2")
|
||||||
|
|
||||||
|
assert appdirs.user_log_dir("pip") == "/home/test/.cache/pip/log"
|
||||||
|
|
||||||
|
def test_user_log_dir_linux_override(self, monkeypatch):
|
||||||
|
monkeypatch.setenv("XDG_CACHE_HOME", "/home/test/.other-cache")
|
||||||
|
monkeypatch.setenv("HOME", "/home/test")
|
||||||
|
monkeypatch.setattr(sys, "platform", "linux2")
|
||||||
|
|
||||||
|
assert appdirs.user_log_dir("pip") == "/home/test/.other-cache/pip/log"
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
import os
|
import logging
|
||||||
|
|
||||||
from pip.basecommand import Command
|
from pip.basecommand import Command
|
||||||
from pip.log import logger
|
|
||||||
|
|
||||||
|
|
||||||
class FakeCommand(Command):
|
class FakeCommand(Command):
|
||||||
|
@ -12,7 +12,7 @@ class FakeCommand(Command):
|
||||||
super(FakeCommand, self).__init__()
|
super(FakeCommand, self).__init__()
|
||||||
|
|
||||||
def run(self, options, args):
|
def run(self, options, args):
|
||||||
logger.info("fake")
|
logging.getLogger("pip.tests").info("fake")
|
||||||
if self.error:
|
if self.error:
|
||||||
raise SystemExit(1)
|
raise SystemExit(1)
|
||||||
|
|
||||||
|
@ -22,8 +22,10 @@ class FakeCommandWithUnicode(FakeCommand):
|
||||||
summary = name
|
summary = name
|
||||||
|
|
||||||
def run(self, options, args):
|
def run(self, options, args):
|
||||||
logger.info(b"bytes here \xE9")
|
logging.getLogger("pip.tests").info(b"bytes here \xE9")
|
||||||
logger.info(b"unicode here \xC3\xA9".decode('utf-8'))
|
logging.getLogger("pip.tests").info(
|
||||||
|
b"unicode here \xC3\xA9".decode("utf-8")
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class Test_basecommand_logging(object):
|
class Test_basecommand_logging(object):
|
||||||
|
@ -32,9 +34,6 @@ class Test_basecommand_logging(object):
|
||||||
options
|
options
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def teardown(self):
|
|
||||||
logger.consumers = []
|
|
||||||
|
|
||||||
def test_log_command_success(self, tmpdir):
|
def test_log_command_success(self, tmpdir):
|
||||||
"""
|
"""
|
||||||
Test the --log option logs when command succeeds
|
Test the --log option logs when command succeeds
|
||||||
|
@ -53,16 +52,6 @@ class Test_basecommand_logging(object):
|
||||||
cmd.main(['fake', '--log', log_path])
|
cmd.main(['fake', '--log', log_path])
|
||||||
assert 'fake' == open(log_path).read().strip()[:4]
|
assert 'fake' == open(log_path).read().strip()[:4]
|
||||||
|
|
||||||
def test_log_file_command_success(self, tmpdir):
|
|
||||||
"""
|
|
||||||
Test the --log-file option *doesn't* log when command succeeds.
|
|
||||||
(It's just the historical behavior? this just confirms it)
|
|
||||||
"""
|
|
||||||
cmd = FakeCommand()
|
|
||||||
log_file_path = tmpdir.join('log_file')
|
|
||||||
cmd.main(['fake', '--log-file', log_file_path])
|
|
||||||
assert not os.path.exists(log_file_path)
|
|
||||||
|
|
||||||
def test_log_file_command_error(self, tmpdir):
|
def test_log_file_command_error(self, tmpdir):
|
||||||
"""
|
"""
|
||||||
Test the --log-file option logs (when there's an error).
|
Test the --log-file option logs (when there's an error).
|
||||||
|
@ -72,27 +61,6 @@ class Test_basecommand_logging(object):
|
||||||
cmd.main(['fake', '--log-file', log_file_path])
|
cmd.main(['fake', '--log-file', log_file_path])
|
||||||
assert 'fake' == open(log_file_path).read().strip()[:4]
|
assert 'fake' == open(log_file_path).read().strip()[:4]
|
||||||
|
|
||||||
def test_log_log_file(self, tmpdir):
|
|
||||||
"""
|
|
||||||
Test the --log and --log-file options log together (when there's an
|
|
||||||
error).
|
|
||||||
"""
|
|
||||||
cmd = FakeCommand(error=True)
|
|
||||||
log_path = tmpdir.join('log')
|
|
||||||
log_file_path = tmpdir.join('log_file')
|
|
||||||
cmd.main(['fake', '--log', log_path, '--log-file', log_file_path])
|
|
||||||
assert 'fake' == open(log_path).read().strip()[:4]
|
|
||||||
assert 'fake' == open(log_file_path).read().strip()[:4]
|
|
||||||
|
|
||||||
def test_verbose_quiet(self):
|
|
||||||
"""
|
|
||||||
Test additive quality of -v and -q
|
|
||||||
"""
|
|
||||||
cmd = FakeCommand()
|
|
||||||
cmd.main(['fake', '-vqq'])
|
|
||||||
console_level = logger.consumers[0][0]
|
|
||||||
assert console_level == logger.WARN
|
|
||||||
|
|
||||||
def test_unicode_messages(self, tmpdir):
|
def test_unicode_messages(self, tmpdir):
|
||||||
"""
|
"""
|
||||||
Tests that logging bytestrings and unicode objects don't break logging
|
Tests that logging bytestrings and unicode objects don't break logging
|
||||||
|
|
|
@ -4,13 +4,12 @@ import pip.wheel
|
||||||
import pip.pep425tags
|
import pip.pep425tags
|
||||||
|
|
||||||
from pkg_resources import parse_version, Distribution
|
from pkg_resources import parse_version, Distribution
|
||||||
from pip.log import logger
|
|
||||||
from pip.req import InstallRequirement
|
from pip.req import InstallRequirement
|
||||||
from pip.index import PackageFinder, Link
|
from pip.index import PackageFinder, Link
|
||||||
from pip.exceptions import (
|
from pip.exceptions import (
|
||||||
BestVersionAlreadyInstalled, DistributionNotFound, InstallationError,
|
BestVersionAlreadyInstalled, DistributionNotFound, InstallationError,
|
||||||
)
|
)
|
||||||
from pip.util import Inf
|
from pip.utils import Inf
|
||||||
from pip.download import PipSession
|
from pip.download import PipSession
|
||||||
|
|
||||||
from mock import Mock, patch
|
from mock import Mock, patch
|
||||||
|
@ -96,15 +95,10 @@ def test_finder_detects_latest_already_satisfied_pypi_links():
|
||||||
|
|
||||||
class TestWheel:
|
class TestWheel:
|
||||||
|
|
||||||
def teardown(self):
|
def test_skip_invalid_wheel_link(self, caplog, data):
|
||||||
logger.consumers = []
|
|
||||||
|
|
||||||
def test_skip_invalid_wheel_link(self, data):
|
|
||||||
"""
|
"""
|
||||||
Test if PackageFinder skips invalid wheel filenames
|
Test if PackageFinder skips invalid wheel filenames
|
||||||
"""
|
"""
|
||||||
log = []
|
|
||||||
logger.add_consumers((logger.DEBUG, log.append))
|
|
||||||
req = InstallRequirement.from_line("invalid")
|
req = InstallRequirement.from_line("invalid")
|
||||||
# data.find_links contains "invalid.whl", which is an invalid wheel
|
# data.find_links contains "invalid.whl", which is an invalid wheel
|
||||||
finder = PackageFinder(
|
finder = PackageFinder(
|
||||||
|
@ -115,7 +109,11 @@ class TestWheel:
|
||||||
)
|
)
|
||||||
with pytest.raises(DistributionNotFound):
|
with pytest.raises(DistributionNotFound):
|
||||||
finder.find_requirement(req, True)
|
finder.find_requirement(req, True)
|
||||||
"invalid.whl because the wheel filename is invalid" in "".join(log)
|
|
||||||
|
assert (
|
||||||
|
"invalid.whl because the wheel filename is invalid"
|
||||||
|
in caplog.text()
|
||||||
|
)
|
||||||
|
|
||||||
def test_not_find_wheel_not_supported(self, data, monkeypatch):
|
def test_not_find_wheel_not_supported(self, data, monkeypatch):
|
||||||
"""
|
"""
|
||||||
|
|
|
@ -1,125 +0,0 @@
|
||||||
from pip.compat import StringIO
|
|
||||||
from pip.log import should_color, should_warn, Logger
|
|
||||||
|
|
||||||
|
|
||||||
def test_should_color_std():
|
|
||||||
assert not should_color(object(), {}, std=[object()])
|
|
||||||
|
|
||||||
|
|
||||||
def test_should_color_isatty():
|
|
||||||
class FakeTTY(object):
|
|
||||||
def isatty(self):
|
|
||||||
return True
|
|
||||||
|
|
||||||
consumer = FakeTTY()
|
|
||||||
assert should_color(consumer, {}, std=[consumer])
|
|
||||||
|
|
||||||
|
|
||||||
def test_should_color_environ():
|
|
||||||
consumer = object()
|
|
||||||
assert should_color(consumer, {"TERM": "ANSI"}, std=[consumer])
|
|
||||||
|
|
||||||
|
|
||||||
def test_should_color_notty_environ():
|
|
||||||
consumer = object()
|
|
||||||
assert not should_color(consumer, {}, std=[consumer])
|
|
||||||
|
|
||||||
|
|
||||||
def test_should_warn_greater_one_minor():
|
|
||||||
assert should_warn("1.4", "1.6")
|
|
||||||
|
|
||||||
|
|
||||||
def test_should_warn_exactly_one_minor():
|
|
||||||
assert not should_warn("1.5", "1.6")
|
|
||||||
|
|
||||||
|
|
||||||
def test_should_warn_equal():
|
|
||||||
assert not should_warn("1.6", "1.6")
|
|
||||||
|
|
||||||
|
|
||||||
def test_should_warn_greater():
|
|
||||||
assert not should_warn("1.7", "1.6")
|
|
||||||
|
|
||||||
|
|
||||||
def test_should_warn_significance():
|
|
||||||
assert should_warn("1.4.dev1", "1.6")
|
|
||||||
|
|
||||||
|
|
||||||
def test_ignores_duplicate_consumers():
|
|
||||||
"""
|
|
||||||
Make sure if the same consumer & level pair are asked to be added,
|
|
||||||
they're ignored.
|
|
||||||
"""
|
|
||||||
logger = Logger()
|
|
||||||
|
|
||||||
import sys
|
|
||||||
consumer1 = sys.stdout
|
|
||||||
consumer2 = sys.stdout
|
|
||||||
|
|
||||||
logger.add_consumers(
|
|
||||||
(logger.NOTIFY, consumer1),
|
|
||||||
(logger.NOTIFY, consumer2),
|
|
||||||
)
|
|
||||||
logger.add_consumers(
|
|
||||||
(logger.NOTIFY, consumer1),
|
|
||||||
(logger.NOTIFY, consumer2),
|
|
||||||
)
|
|
||||||
|
|
||||||
assert 1 == len(logger.consumers)
|
|
||||||
|
|
||||||
|
|
||||||
def test_ignores_Win32_wrapped_consumers(monkeypatch):
|
|
||||||
"""
|
|
||||||
Test that colorama wrapped duplicate streams are ignored, too.
|
|
||||||
"""
|
|
||||||
logger = Logger()
|
|
||||||
consumer = StringIO()
|
|
||||||
|
|
||||||
consumer1 = consumer
|
|
||||||
consumer2 = consumer
|
|
||||||
|
|
||||||
# Pretend to be Windows
|
|
||||||
monkeypatch.setattr('sys.platform', 'win32')
|
|
||||||
logger.add_consumers(
|
|
||||||
(logger.NOTIFY, consumer1),
|
|
||||||
(logger.NOTIFY, consumer2),
|
|
||||||
)
|
|
||||||
# Pretend to be linux
|
|
||||||
monkeypatch.setattr('sys.platform', 'linux2')
|
|
||||||
logger.add_consumers(
|
|
||||||
(logger.NOTIFY, consumer2),
|
|
||||||
(logger.NOTIFY, consumer1),
|
|
||||||
)
|
|
||||||
|
|
||||||
assert 1 == len(logger.consumers)
|
|
||||||
|
|
||||||
|
|
||||||
def test_log_no_extra_line_break():
|
|
||||||
"""
|
|
||||||
Confirm that multiple `.write()` consumers doesn't result in additional
|
|
||||||
'\n's per write
|
|
||||||
"""
|
|
||||||
consumer1 = StringIO()
|
|
||||||
consumer2 = StringIO()
|
|
||||||
logger = Logger()
|
|
||||||
logger.add_consumers(
|
|
||||||
(logger.NOTIFY, consumer1),
|
|
||||||
(logger.NOTIFY, consumer2)
|
|
||||||
)
|
|
||||||
logger.notify("one line")
|
|
||||||
# splitlines(True) will detect empty line-breaks
|
|
||||||
assert 1 == len(consumer1.getvalue().splitlines(True))
|
|
||||||
assert 1 == len(consumer2.getvalue().splitlines(True))
|
|
||||||
|
|
||||||
|
|
||||||
def test_level_for_integer():
|
|
||||||
logger = Logger()
|
|
||||||
assert logger.VERBOSE_DEBUG == logger.level_for_integer(-1000)
|
|
||||||
assert logger.VERBOSE_DEBUG == logger.level_for_integer(0)
|
|
||||||
assert logger.DEBUG == logger.level_for_integer(1)
|
|
||||||
assert logger.INFO == logger.level_for_integer(2)
|
|
||||||
assert logger.NOTIFY == logger.level_for_integer(3)
|
|
||||||
assert logger.WARN == logger.level_for_integer(4)
|
|
||||||
assert logger.ERROR == logger.level_for_integer(5)
|
|
||||||
assert logger.FATAL == logger.level_for_integer(6)
|
|
||||||
assert logger.FATAL == logger.level_for_integer(1000)
|
|
|
@ -89,10 +89,10 @@ class TestOptionPrecedence(object):
|
||||||
"""
|
"""
|
||||||
os.environ['PIP_LOG_FILE'] = 'override.log'
|
os.environ['PIP_LOG_FILE'] = 'override.log'
|
||||||
options, args = main(['fake'])
|
options, args = main(['fake'])
|
||||||
assert options.log_file == 'override.log'
|
assert options.log == 'override.log'
|
||||||
os.environ['PIP_LOCAL_LOG'] = 'override.log'
|
os.environ['PIP_LOCAL_LOG'] = 'override.log'
|
||||||
options, args = main(['fake'])
|
options, args = main(['fake'])
|
||||||
assert options.log_file == 'override.log'
|
assert options.log == 'override.log'
|
||||||
|
|
||||||
def test_cli_override_environment(self):
|
def test_cli_override_environment(self):
|
||||||
"""
|
"""
|
||||||
|
@ -210,7 +210,7 @@ class TestGeneralOptions(object):
|
||||||
def test_local_log(self):
|
def test_local_log(self):
|
||||||
options1, args1 = main(['--local-log', 'path', 'fake'])
|
options1, args1 = main(['--local-log', 'path', 'fake'])
|
||||||
options2, args2 = main(['fake', '--local-log', 'path'])
|
options2, args2 = main(['fake', '--local-log', 'path'])
|
||||||
assert options1.log_file == options2.log_file == 'path'
|
assert options1.log == options2.log == 'path'
|
||||||
|
|
||||||
def test_no_input(self):
|
def test_no_input(self):
|
||||||
options1, args1 = main(['--no-input', 'fake'])
|
options1, args1 = main(['--no-input', 'fake'])
|
||||||
|
|
|
@ -11,11 +11,10 @@ from pip.exceptions import (
|
||||||
from pip.download import PipSession
|
from pip.download import PipSession
|
||||||
from pip._vendor import pkg_resources
|
from pip._vendor import pkg_resources
|
||||||
from pip.index import PackageFinder
|
from pip.index import PackageFinder
|
||||||
from pip.log import logger
|
|
||||||
from pip.req import (InstallRequirement, RequirementSet,
|
from pip.req import (InstallRequirement, RequirementSet,
|
||||||
Requirements, parse_requirements)
|
Requirements, parse_requirements)
|
||||||
from pip.req.req_install import parse_editable
|
from pip.req.req_install import parse_editable
|
||||||
from pip.util import read_text_file
|
from pip.utils import read_text_file
|
||||||
from tests.lib import assert_raises_regexp
|
from tests.lib import assert_raises_regexp
|
||||||
|
|
||||||
|
|
||||||
|
@ -23,11 +22,9 @@ class TestRequirementSet(object):
|
||||||
"""RequirementSet tests"""
|
"""RequirementSet tests"""
|
||||||
|
|
||||||
def setup(self):
|
def setup(self):
|
||||||
logger.consumers = [(logger.NOTIFY, Mock())]
|
|
||||||
self.tempdir = tempfile.mkdtemp()
|
self.tempdir = tempfile.mkdtemp()
|
||||||
|
|
||||||
def teardown(self):
|
def teardown(self):
|
||||||
logger.consumers = []
|
|
||||||
shutil.rmtree(self.tempdir, ignore_errors=True)
|
shutil.rmtree(self.tempdir, ignore_errors=True)
|
||||||
|
|
||||||
def basic_reqset(self):
|
def basic_reqset(self):
|
||||||
|
@ -66,7 +63,7 @@ def test_egg_info_data(file_contents, expected):
|
||||||
om = mock_open(read_data=file_contents)
|
om = mock_open(read_data=file_contents)
|
||||||
em = Mock()
|
em = Mock()
|
||||||
em.return_value = 'cp1252'
|
em.return_value = 'cp1252'
|
||||||
with patch('pip.util.open', om, create=True):
|
with patch('pip.utils.open', om, create=True):
|
||||||
with patch('locale.getpreferredencoding', em):
|
with patch('locale.getpreferredencoding', em):
|
||||||
ret = read_text_file('foo')
|
ret = read_text_file('foo')
|
||||||
assert ret == expected.decode('utf-8')
|
assert ret == expected.decode('utf-8')
|
||||||
|
|
|
@ -12,8 +12,8 @@ import pytest
|
||||||
|
|
||||||
from mock import Mock, patch
|
from mock import Mock, patch
|
||||||
from pip.exceptions import BadCommand
|
from pip.exceptions import BadCommand
|
||||||
from pip.util import (egg_link_path, Inf, get_installed_distributions,
|
from pip.utils import (egg_link_path, Inf, get_installed_distributions,
|
||||||
find_command, untar_file, unzip_file)
|
find_command, untar_file, unzip_file)
|
||||||
from pip.commands.freeze import freeze_excludes
|
from pip.commands.freeze import freeze_excludes
|
||||||
|
|
||||||
|
|
||||||
|
@ -37,26 +37,26 @@ class Tests_EgglinkPath:
|
||||||
)
|
)
|
||||||
|
|
||||||
# patches
|
# patches
|
||||||
from pip import util
|
from pip import utils
|
||||||
self.old_site_packages = util.site_packages
|
self.old_site_packages = utils.site_packages
|
||||||
self.mock_site_packages = util.site_packages = 'SITE_PACKAGES'
|
self.mock_site_packages = utils.site_packages = 'SITE_PACKAGES'
|
||||||
self.old_running_under_virtualenv = util.running_under_virtualenv
|
self.old_running_under_virtualenv = utils.running_under_virtualenv
|
||||||
self.mock_running_under_virtualenv = util.running_under_virtualenv = \
|
self.mock_running_under_virtualenv = utils.running_under_virtualenv = \
|
||||||
Mock()
|
Mock()
|
||||||
self.old_virtualenv_no_global = util.virtualenv_no_global
|
self.old_virtualenv_no_global = utils.virtualenv_no_global
|
||||||
self.mock_virtualenv_no_global = util.virtualenv_no_global = Mock()
|
self.mock_virtualenv_no_global = utils.virtualenv_no_global = Mock()
|
||||||
self.old_user_site = util.user_site
|
self.old_user_site = utils.user_site
|
||||||
self.mock_user_site = util.user_site = self.user_site
|
self.mock_user_site = utils.user_site = self.user_site
|
||||||
from os import path
|
from os import path
|
||||||
self.old_isfile = path.isfile
|
self.old_isfile = path.isfile
|
||||||
self.mock_isfile = path.isfile = Mock()
|
self.mock_isfile = path.isfile = Mock()
|
||||||
|
|
||||||
def teardown(self):
|
def teardown(self):
|
||||||
from pip import util
|
from pip import utils
|
||||||
util.site_packages = self.old_site_packages
|
utils.site_packages = self.old_site_packages
|
||||||
util.running_under_virtualenv = self.old_running_under_virtualenv
|
utils.running_under_virtualenv = self.old_running_under_virtualenv
|
||||||
util.virtualenv_no_global = self.old_virtualenv_no_global
|
utils.virtualenv_no_global = self.old_virtualenv_no_global
|
||||||
util.user_site = self.old_user_site
|
utils.user_site = self.old_user_site
|
||||||
from os import path
|
from os import path
|
||||||
path.isfile = self.old_isfile
|
path.isfile = self.old_isfile
|
||||||
|
|
||||||
|
@ -161,8 +161,8 @@ def test_Inf_equals_Inf():
|
||||||
assert Inf == Inf
|
assert Inf == Inf
|
||||||
|
|
||||||
|
|
||||||
@patch('pip.util.dist_is_local')
|
@patch('pip.utils.dist_is_local')
|
||||||
@patch('pip.util.dist_is_editable')
|
@patch('pip.utils.dist_is_editable')
|
||||||
class Tests_get_installed_distributions:
|
class Tests_get_installed_distributions:
|
||||||
"""test util.get_installed_distributions"""
|
"""test util.get_installed_distributions"""
|
||||||
|
|
||||||
|
@ -274,7 +274,7 @@ def test_does_not_find_command_because_there_is_no_path():
|
||||||
|
|
||||||
|
|
||||||
@patch('os.pathsep', ':')
|
@patch('os.pathsep', ':')
|
||||||
@patch('pip.util.get_pathext')
|
@patch('pip.utils.get_pathext')
|
||||||
@patch('os.path.isfile')
|
@patch('os.path.isfile')
|
||||||
def test_find_command_trys_all_pathext(mock_isfile, getpath_mock):
|
def test_find_command_trys_all_pathext(mock_isfile, getpath_mock):
|
||||||
"""
|
"""
|
||||||
|
@ -301,7 +301,7 @@ def test_find_command_trys_all_pathext(mock_isfile, getpath_mock):
|
||||||
|
|
||||||
|
|
||||||
@patch('os.pathsep', ':')
|
@patch('os.pathsep', ':')
|
||||||
@patch('pip.util.get_pathext')
|
@patch('pip.utils.get_pathext')
|
||||||
@patch('os.path.isfile')
|
@patch('os.path.isfile')
|
||||||
def test_find_command_trys_supplied_pathext(mock_isfile, getpath_mock):
|
def test_find_command_trys_supplied_pathext(mock_isfile, getpath_mock):
|
||||||
"""
|
"""
|
|
@ -7,7 +7,7 @@ from mock import patch, Mock
|
||||||
from pip._vendor import pkg_resources
|
from pip._vendor import pkg_resources
|
||||||
from pip import pep425tags, wheel
|
from pip import pep425tags, wheel
|
||||||
from pip.exceptions import InvalidWheelFilename, UnsupportedWheel
|
from pip.exceptions import InvalidWheelFilename, UnsupportedWheel
|
||||||
from pip.util import unpack_file
|
from pip.utils import unpack_file
|
||||||
|
|
||||||
|
|
||||||
def test_get_entrypoints(tmpdir):
|
def test_get_entrypoints(tmpdir):
|
||||||
|
@ -255,7 +255,7 @@ class TestWheelFile(object):
|
||||||
assert w.support_index_min(tags=[]) is None
|
assert w.support_index_min(tags=[]) is None
|
||||||
|
|
||||||
def test_unpack_wheel_no_flatten(self):
|
def test_unpack_wheel_no_flatten(self):
|
||||||
from pip import util
|
from pip import utils
|
||||||
from tempfile import mkdtemp
|
from tempfile import mkdtemp
|
||||||
from shutil import rmtree
|
from shutil import rmtree
|
||||||
|
|
||||||
|
@ -264,7 +264,7 @@ class TestWheelFile(object):
|
||||||
pytest.skip("%s does not exist" % filepath)
|
pytest.skip("%s does not exist" % filepath)
|
||||||
try:
|
try:
|
||||||
tmpdir = mkdtemp()
|
tmpdir = mkdtemp()
|
||||||
util.unpack_file(filepath, tmpdir, 'application/zip', None)
|
utils.unpack_file(filepath, tmpdir, 'application/zip', None)
|
||||||
assert os.path.isdir(os.path.join(tmpdir, 'meta-1.0.dist-info'))
|
assert os.path.isdir(os.path.join(tmpdir, 'meta-1.0.dist-info'))
|
||||||
finally:
|
finally:
|
||||||
rmtree(tmpdir)
|
rmtree(tmpdir)
|
||||||
|
@ -371,24 +371,20 @@ class TestMoveWheelFiles(object):
|
||||||
|
|
||||||
class TestWheelBuilder(object):
|
class TestWheelBuilder(object):
|
||||||
|
|
||||||
@patch('pip.log.Logger.log')
|
def test_skip_building_wheels(self, caplog):
|
||||||
@patch('pip.wheel.WheelBuilder._build_one')
|
with patch('pip.wheel.WheelBuilder._build_one') as mock_build_one:
|
||||||
def test_skip_building_wheels(self, mock_build_one, mock_log):
|
wheel_req = Mock(is_wheel=True, editable=False)
|
||||||
wheel_req = Mock(is_wheel=True, editable=False)
|
reqset = Mock(requirements=Mock(values=lambda: [wheel_req]))
|
||||||
reqset = Mock(requirements=Mock(values=lambda: [wheel_req]))
|
wb = wheel.WheelBuilder(reqset, Mock(), '/wheel/dir')
|
||||||
wb = wheel.WheelBuilder(reqset, Mock(), '/wheel/dir')
|
wb.build()
|
||||||
wb.build()
|
assert "due to already being wheel" in caplog.text()
|
||||||
name, args, kwargs = mock_log.mock_calls[0]
|
assert mock_build_one.mock_calls == []
|
||||||
assert "due to already being wheel" in args[1]
|
|
||||||
assert mock_build_one.mock_calls == []
|
|
||||||
|
|
||||||
@patch('pip.log.Logger.log')
|
def test_skip_building_editables(self, caplog):
|
||||||
@patch('pip.wheel.WheelBuilder._build_one')
|
with patch('pip.wheel.WheelBuilder._build_one') as mock_build_one:
|
||||||
def test_skip_building_editables(self, mock_build_one, mock_log):
|
editable_req = Mock(editable=True, is_wheel=False)
|
||||||
editable_req = Mock(editable=True, is_wheel=False)
|
reqset = Mock(requirements=Mock(values=lambda: [editable_req]))
|
||||||
reqset = Mock(requirements=Mock(values=lambda: [editable_req]))
|
wb = wheel.WheelBuilder(reqset, Mock(), '/wheel/dir')
|
||||||
wb = wheel.WheelBuilder(reqset, Mock(), '/wheel/dir')
|
wb.build()
|
||||||
wb.build()
|
assert "due to being editable" in caplog.text()
|
||||||
name, args, kwargs = mock_log.mock_calls[0]
|
assert mock_build_one.mock_calls == []
|
||||||
assert "due to being editable" in args[1]
|
|
||||||
assert mock_build_one.mock_calls == []
|
|
||||||
|
|
Loading…
Reference in a new issue