Switch to using the stdlib logger instead of pip.log

This commit is contained in:
Donald Stufft 2014-08-30 19:52:28 -04:00
parent 83cdcc1f40
commit 767d11e49c
59 changed files with 2052 additions and 1346 deletions

View File

@ -8,8 +8,7 @@ from docutils.statemachine import ViewList
from textwrap import dedent
from pip import commands
from pip import cmdoptions
from pip.locations import default_log_file
from pip.util import get_prog
from pip.utils import get_prog
class PipCommandUsage(rst.Directive):
@ -58,10 +57,6 @@ class PipOptions(rst.Directive):
opt_help = option.help.replace('%default', str(option.default))
# fix paths with sys.prefix
opt_help = opt_help.replace(sys.prefix, "<sys.prefix>")
opt_help = opt_help.replace(
default_log_file,
"<see :ref:`FileLogging`>",
)
return [bookmark_line, "", line, "", " %s" % opt_help, ""]
def _format_options(self, options, cmd_name=None):

View File

@ -24,26 +24,7 @@ Console logging
~~~~~~~~~~~~~~~
pip offers :ref:`-v, --verbose <--verbose>` and :ref:`-q, --quiet <--quiet>`
to control the console log level. Each option can be used multiple times and
used together. One ``-v`` increases the verbosity by one, whereas one ``-q`` decreases it by
one.
The series of log levels, in order, are as follows::
VERBOSE_DEBUG, DEBUG, INFO, NOTIFY, WARN, ERROR, FATAL
``NOTIFY`` is the default level.
A few examples on how the parameters work to affect the level:
* specifying nothing results in ``NOTIFY``
* ``-v`` results in ``INFO``
* ``-vv`` results in ``DEBUG``
* ``-q`` results in ``WARN``
* ``-vq`` results in ``NOTIFY``
The most practical use case for users is either ``-v`` or ``-vv`` to see
additional logging to help troubleshoot an issue.
to control the console log level.
.. _`FileLogging`:
@ -55,20 +36,16 @@ pip offers the :ref:`--log <--log>` option for specifying a file where a maximum
verbosity log will be kept. This option is empty by default. This log appends
to previous logging.
Additionally, when commands fail (i.e. return a non-zero exit code), pip writes
a "failure log" for the failed command. This log overwrites previous
logging. The default location is as follows:
Additionally, pip writes a "debug log" for every command. This log appends and
will periodically rotate and clean itself up to limit on disk file size. The
default location is as follows:
* On Unix and Mac OS X: :file:`$HOME/.pip/pip.log`
* On Windows, the log file is: :file:`%HOME%\\pip\\pip.log`
* On Unix: :file:`$HOME/.cache/pip/log/debug.log`
* On Mac OS X: :file:`$HOME/Library/Logs/pip/debug.log`
* On Windows: :file:`C:\\Users\\<username>\\AppData\\Local\\pip\\Logs\\debug.log`
The option for the failure log, is :ref:`--log-file <--log-file>`.
Both logs add a line per execution to specify the date and what pip executable wrote the log.
Like all pip options, ``--log`` and ``log-file``, can also be set as an environment
variable, or placed into the pip config file. See the :ref:`Configuration`
section.
Like all pip options, ``--log`` can also be set as an environment variable, or
placed into the pip config file. See the :ref:`Configuration` section.
.. _`exists-action`:

View File

@ -1,13 +1,17 @@
#!/usr/bin/env python
from __future__ import absolute_import
import logging
import os
import optparse
import warnings
import sys
import re
from pip.exceptions import InstallationError, CommandError, PipError
from pip.log import logger
from pip.util import get_installed_distributions, get_prog
from pip.utils import get_installed_distributions, get_prog
from pip.utils import deprecation
from pip.vcs import git, mercurial, subversion, bazaar # noqa
from pip.baseparser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
from pip.commands import commands, get_summaries, get_similar_commands
@ -22,6 +26,9 @@ cmdoptions = pip.cmdoptions
__version__ = "1.6.dev1"
logger = logging.getLogger(__name__)
def autocomplete():
"""Command and option completion for the main option parser (and options)
and its subcommands (and options).
@ -171,6 +178,13 @@ def main(args=None):
if args is None:
args = sys.argv[1:]
# Enable our Deprecation Warnings
for deprecation_warning in deprecation.DEPRECATIONS:
warnings.simplefilter("default", deprecation_warning)
# Configure our deprecation warnings to be sent through loggers
deprecation.install_warning_logger()
autocomplete()
try:
@ -208,14 +222,14 @@ class FrozenRequirement(object):
try:
req = get_src_requirement(dist, location, find_tags)
except InstallationError as exc:
logger.warn(
logger.warning(
"Error when trying to get requirement for VCS system %s, "
"falling back to uneditable format" % exc
"falling back to uneditable format", exc
)
req = None
if req is None:
logger.warn(
'Could not determine repository location of %s' % location
logger.warning(
'Could not determine repository location of %s', location
)
comments.append(
'## !! Could not determine repository location'
@ -238,8 +252,8 @@ class FrozenRequirement(object):
dependency_links,
)
if not svn_location:
logger.warn(
'Warning: cannot find svn location for %s' % req)
logger.warning(
'Warning: cannot find svn location for %s', req)
comments.append(
'## FIXME: could not find svn URL in dependency_links '
'for this package:'

View File

@ -1,3 +1,5 @@
from __future__ import absolute_import
import sys
# If we are running from a wheel, add the wheel to sys.path

View File

@ -2,6 +2,8 @@
This code was taken from https://github.com/ActiveState/appdirs and modified
to suit our purposes.
"""
from __future__ import absolute_import
import os
import sys
@ -52,6 +54,82 @@ def user_cache_dir(appname):
return path
def user_data_dir(appname, roaming=False):
    r"""
    Return full path to the user-specific data dir for this application.

    "appname" is the name of application.
        If None, just the system directory is returned.
    "roaming" (boolean, default False) can be set True to use the Windows
        roaming appdata directory. That means that for users on a Windows
        network setup for roaming profiles, this user data will be
        sync'd on login. See
        <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
        for a discussion of issues.

    Typical user data directories are:
        Mac OS X: ~/Library/Application Support/<AppName>
        Unix: ~/.local/share/<AppName>    # or in
                $XDG_DATA_HOME, if defined
        Win XP (not roaming): C:\Documents and Settings\<username>\ ...
                ...Application Data\<AppName>
        Win XP (roaming): C:\Documents and Settings\<username>\Local ...
                ...Settings\Application Data\<AppName>
        Win 7  (not roaming): C:\Users\<username>\AppData\Local\<AppName>
        Win 7  (roaming): C:\Users\<username>\AppData\Roaming\<AppName>

    For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
    That means, by default "~/.local/share/<AppName>".
    """
    # Resolve the platform-specific base directory first, then append the
    # application name exactly once.
    if WINDOWS:
        csidl = "CSIDL_APPDATA" if roaming else "CSIDL_LOCAL_APPDATA"
        base = os.path.normpath(_get_win_folder(csidl))
    elif sys.platform == "darwin":
        base = os.path.expanduser("~/Library/Application Support/")
    else:
        # XDG spec: fall back to ~/.local/share when $XDG_DATA_HOME is unset.
        base = os.getenv("XDG_DATA_HOME", os.path.expanduser("~/.local/share"))
    return os.path.join(base, appname)
def user_log_dir(appname):
    r"""
    Return full path to the user-specific log dir for this application.

    "appname" is the name of application.
        If None, just the system directory is returned.

    Typical user log directories are:
        Mac OS X: ~/Library/Logs/<AppName>
        Unix: ~/.cache/<AppName>/log    # or under $XDG_CACHE_HOME if
                                          defined
        Win XP: C:\Documents and Settings\<username>\Local Settings\ ...
                ...Application Data\<AppName>\Logs
        Vista: C:\Users\<username>\AppData\Local\<AppName>\Logs

    On Windows the only suggestion in the MSDN docs is that local settings
    go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in
    examples of what some windows apps use for a logs dir.)

    OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA`
    value for Windows and appends "log" to the user cache dir for Unix.
    """
    # Guard-clause per platform; each branch returns directly.
    if WINDOWS:
        return os.path.join(user_data_dir(appname), "Logs")
    if sys.platform == "darwin":
        return os.path.join(os.path.expanduser('~/Library/Logs'), appname)
    return os.path.join(user_cache_dir(appname), "log")
# for the discussion regarding site_config_dirs locations
# see <https://github.com/pypa/pip/issues/1733>
def site_config_dirs(appname):

View File

@ -1,34 +1,40 @@
"""Base Command class, and related routines"""
from __future__ import absolute_import
import logging
import os
import sys
import tempfile
import traceback
import time
import optparse
import warnings
from pip import cmdoptions
from pip import appdirs, cmdoptions
from pip.locations import running_under_virtualenv
from pip.log import logger
from pip.download import PipSession
from pip.exceptions import (BadCommand, InstallationError, UninstallationError,
CommandError, PreviousBuildDirError)
from pip.compat import StringIO, native_str
from pip.compat import StringIO, logging_dictConfig
from pip.baseparser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
from pip.status_codes import (
SUCCESS, ERROR, UNKNOWN_ERROR, VIRTUALENV_NOT_FOUND,
PREVIOUS_BUILD_DIR_ERROR,
)
from pip.util import get_prog, normalize_path
from pip.utils import get_prog, normalize_path
from pip.utils.deprecation import RemovedInPip18Warning
from pip.utils.logging import IndentingFormatter
__all__ = ['Command']
logger = logging.getLogger(__name__)
class Command(object):
name = None
usage = None
hidden = False
log_stream = "ext://sys.stdout"
def __init__(self):
parser_kw = {
@ -88,9 +94,6 @@ class Command(object):
return session
def setup_logging(self):
pass
def parse_args(self, args):
# factored out for testability
return self.parser.parse_args(args)
@ -98,19 +101,84 @@ class Command(object):
def main(self, args):
options, args = self.parse_args(args)
level = 1 # Notify
level += options.verbose
level -= options.quiet
level = logger.level_for_integer(4 - level)
complete_log = []
logger.add_consumers(
(level, sys.stdout),
(logger.DEBUG, complete_log.append),
)
if options.log_explicit_levels:
logger.explicit_levels = True
if options.quiet:
level = "WARNING"
elif options.verbose:
level = "DEBUG"
else:
level = "INFO"
self.setup_logging()
logging_dictConfig({
"version": 1,
"disable_existing_loggers": False,
"formatters": {
"indent": {
"()": IndentingFormatter,
"format": (
"%(message)s"
if not options.log_explicit_levels
else "[%(levelname)s] %(message)s"
),
},
},
"handlers": {
"console": {
"level": level,
"class": "pip.utils.logging.ColorizedStreamHandler",
"stream": self.log_stream,
"formatter": "indent",
},
"debug_log": {
"level": "DEBUG",
"class": "pip.utils.logging.BetterRotatingFileHandler",
"filename": os.path.join(
appdirs.user_log_dir("pip"), "debug.log",
),
"maxBytes": 10 * 1000 * 1000, # 10 MB
"backupCount": 1,
"delay": True,
"formatter": "indent",
},
"user_log": {
"level": "DEBUG",
"class": "pip.utils.logging.BetterRotatingFileHandler",
"filename": options.log or "/dev/null",
"delay": True,
"formatter": "indent",
},
},
"root": {
"level": level,
"handlers": list(filter(None, [
"console",
"debug_log",
"user_log" if options.log else None,
])),
},
# Disable any logging besides WARNING unless we have DEBUG level
# logging enabled. These use both pip._vendor and the bare names
# for the case where someone unbundles our libraries.
"loggers": dict(
(
name,
{
"level": (
"WARNING"
if level in ["INFO", "ERROR"]
else "DEBUG"
),
},
)
for name in ["pip._vendor", "distlib", "requests", "urllib3"]
),
})
if options.log_explicit_levels:
warnings.warn(
"--log-explicit-levels has been deprecated and will be removed"
" in a future version.",
RemovedInPip18Warning,
)
# TODO: try to get these passing down from the command?
# without resorting to os.environ to hold these.
@ -124,68 +192,43 @@ class Command(object):
if options.require_venv:
# If a venv is required check if it can really be found
if not running_under_virtualenv():
logger.fatal(
logger.critical(
'Could not find an activated virtualenv (required).'
)
sys.exit(VIRTUALENV_NOT_FOUND)
if options.log:
log_fp = open_logfile(options.log, 'a')
logger.add_consumers((logger.DEBUG, log_fp))
else:
log_fp = None
exit = SUCCESS
store_log = False
try:
status = self.run(options, args)
# FIXME: all commands should return an exit status
# and when it is done, isinstance is not needed anymore
if isinstance(status, int):
exit = status
return status
except PreviousBuildDirError as exc:
logger.fatal(str(exc))
logger.info('Exception information:\n%s' % format_exc())
store_log = True
exit = PREVIOUS_BUILD_DIR_ERROR
except (InstallationError, UninstallationError) as exc:
logger.fatal(str(exc))
logger.info('Exception information:\n%s' % format_exc())
store_log = True
exit = ERROR
except BadCommand as exc:
logger.fatal(str(exc))
logger.info('Exception information:\n%s' % format_exc())
store_log = True
exit = ERROR
logger.critical(str(exc))
logger.debug('Exception information:\n%s', format_exc())
return PREVIOUS_BUILD_DIR_ERROR
except (InstallationError, UninstallationError, BadCommand) as exc:
logger.critical(str(exc))
logger.debug('Exception information:\n%s', format_exc())
return ERROR
except CommandError as exc:
logger.fatal('ERROR: %s' % exc)
logger.info('Exception information:\n%s' % format_exc())
exit = ERROR
logger.critical('ERROR: %s', exc)
logger.debug('Exception information:\n%s', format_exc())
return ERROR
except KeyboardInterrupt:
logger.fatal('Operation cancelled by user')
logger.info('Exception information:\n%s' % format_exc())
store_log = True
exit = ERROR
logger.critical('Operation cancelled by user')
logger.debug('Exception information:\n%s', format_exc())
return ERROR
except:
logger.fatal('Exception:\n%s' % format_exc())
store_log = True
exit = UNKNOWN_ERROR
if store_log:
log_file_fn = options.log_file
text = '\n'.join(native_str(l, True) for l in complete_log)
try:
log_file_fp = open_logfile(log_file_fn, 'w')
except IOError:
temp = tempfile.NamedTemporaryFile(delete=False)
log_file_fn = temp.name
log_file_fp = open_logfile(log_file_fn, 'w')
logger.fatal('Storing debug log for failure in %s' % log_file_fn)
log_file_fp.write(text)
log_file_fp.close()
if log_fp is not None:
log_fp.close()
return exit
logger.critical('Exception:\n%s', format_exc())
return UNKNOWN_ERROR
return SUCCESS
def format_exc(exc_info=None):
@ -194,27 +237,3 @@ def format_exc(exc_info=None):
out = StringIO()
traceback.print_exception(*exc_info, **dict(file=out))
return out.getvalue()
def open_logfile(filename, mode='a'):
    """Open the named log file in append mode and return the file object.

    The path is user-expanded and made absolute, and any missing parent
    directories are created. If the file already exists, a separator line
    and a "<argv0> run on <date>" header are printed to the file to
    separate past activity from current activity.
    """
    filename = os.path.expanduser(filename)
    filename = os.path.abspath(filename)
    dirname = os.path.dirname(filename)
    try:
        os.makedirs(dirname)
    except OSError:
        # The directory may already exist or have been created concurrently
        # (the old exists()-then-makedirs() check was racy). Only re-raise
        # when the directory genuinely could not be created.
        if not os.path.isdir(dirname):
            raise
    exists = os.path.exists(filename)

    kw = {}
    if sys.version_info[0] != 2:
        # On Python 3 the file is opened in text mode; replace characters
        # the target encoding cannot represent instead of crashing.
        kw['errors'] = 'replace'
    log_fp = open(filename, mode, **kw)
    if exists:
        log_fp.write('%s\n' % ('-' * 60))
        log_fp.write('%s run on %s\n' % (sys.argv[0], time.strftime('%c')))
    return log_fp

View File

@ -1,4 +1,5 @@
"""Base option parser setup"""
from __future__ import absolute_import
import sys
import optparse
@ -12,7 +13,7 @@ from pip.locations import (
default_config_file, default_config_basename, running_under_virtualenv,
site_config_files
)
from pip.util import get_terminal_size
from pip.utils import get_terminal_size
class PrettyHelpFormatter(optparse.IndentedHelpFormatter):

View File

@ -7,10 +7,12 @@ between parses. pip parse's general options twice internally, and shouldn't
pass on state. To be consistent, all options will follow this design.
"""
from __future__ import absolute_import
import copy
from optparse import OptionGroup, SUPPRESS_HELP, Option
from pip.locations import (
CA_BUNDLE_PATH, USER_CACHE_DIR, build_prefix, default_log_file, src_prefix,
CA_BUNDLE_PATH, USER_CACHE_DIR, build_prefix, src_prefix,
)
@ -79,10 +81,11 @@ quiet = OptionMaker(
help='Give less output.')
log = OptionMaker(
'--log',
dest='log',
metavar='path',
help='Path to a verbose appending log. This log is inactive by default.')
"--log", "--log-file", "--local-log",
dest="log",
metavar="path",
help="Path to a verbose appending log."
)
log_explicit_levels = OptionMaker(
# Writes the log levels explicitly to the log
@ -92,15 +95,6 @@ log_explicit_levels = OptionMaker(
default=False,
help=SUPPRESS_HELP)
log_file = OptionMaker(
# The default log file
'--log-file', '--local-log',
dest='log_file',
metavar='path',
default=default_log_file,
help='Path to a verbose non-appending log, that only logs failures. This '
'log is active by default at %default.')
no_input = OptionMaker(
# Don't ask for input
'--no-input',
@ -409,7 +403,6 @@ general_group = {
verbose,
version,
quiet,
log_file,
log,
log_explicit_levels,
no_input,

View File

@ -1,7 +1,7 @@
"""
Package containing all pip commands
"""
from __future__ import absolute_import
from pip.commands.completion import CompletionCommand
from pip.commands.freeze import FreezeCommand

View File

@ -1,3 +1,5 @@
from __future__ import absolute_import
import sys
from pip.basecommand import Command

View File

@ -1,18 +1,25 @@
from __future__ import absolute_import
import logging
import re
import sys
import pip
from pip.compat import stdlib_pkgs
from pip.req import InstallRequirement
from pip.log import logger
from pip.basecommand import Command
from pip.util import get_installed_distributions
from pip.utils import get_installed_distributions
from pip._vendor import pkg_resources
# packages to exclude from freeze output
freeze_excludes = stdlib_pkgs + ['setuptools', 'pip', 'distribute']
logger = logging.getLogger(__name__)
class FreezeCommand(Command):
"""
Output installed packages in requirements format.
@ -23,6 +30,7 @@ class FreezeCommand(Command):
usage = """
%prog [options]"""
summary = 'Output installed packages in requirements format.'
log_stream = "ext://sys.stderr"
def __init__(self, *args, **kw):
super(FreezeCommand, self).__init__(*args, **kw)
@ -53,9 +61,6 @@ class FreezeCommand(Command):
self.parser.insert_option_group(0, self.cmd_opts)
def setup_logging(self):
logger.move_stdout_to_stderr()
def run(self, options, args):
requirement = options.requirement
find_links = options.find_links or []
@ -123,19 +128,21 @@ class FreezeCommand(Command):
else:
line_req = InstallRequirement.from_line(line)
if not line_req.name:
logger.notify(
logger.info(
"Skipping line because it's not clear what it would "
"install: %s" % line.strip()
"install: %s",
line.strip(),
)
logger.notify(
logger.info(
" (add #egg=PackageName to the URL to avoid"
" this warning)"
)
continue
if line_req.name not in installations:
logger.warn(
logger.warning(
"Requirement file contains %s, but that package is not"
" installed" % line.strip()
" installed",
line.strip(),
)
continue
f.write(str(installations[line_req.name]))

View File

@ -1,3 +1,5 @@
from __future__ import absolute_import
from pip.basecommand import Command, SUCCESS
from pip.exceptions import CommandError

View File

@ -1,9 +1,12 @@
from __future__ import absolute_import
import logging
import os
import tempfile
import shutil
import warnings
from pip.req import InstallRequirement, RequirementSet, parse_requirements
from pip.log import logger
from pip.locations import (virtualenv_no_global, distutils_scheme,
build_prefix)
from pip.basecommand import Command
@ -12,6 +15,10 @@ from pip.exceptions import (
InstallationError, CommandError, PreviousBuildDirError,
)
from pip import cmdoptions
from pip.utils.deprecation import RemovedInPip17Warning, RemovedInPip18Warning
logger = logging.getLogger(__name__)
class InstallCommand(Command):
@ -192,11 +199,10 @@ class InstallCommand(Command):
(options.build_dir != build_prefix) or
options.no_clean
):
logger.deprecated(
'1.7',
'DEPRECATION: --no-install, --no-download, --build, '
'and --no-clean are deprecated. See '
'https://github.com/pypa/pip/issues/906.',
warnings.warn(
"--no-install, --no-download, --build, and --no-clean are "
"deprecated. See https://github.com/pypa/pip/issues/906.",
RemovedInPip17Warning,
)
if options.download_dir:
@ -229,32 +235,32 @@ class InstallCommand(Command):
global_options = options.global_options or []
index_urls = [options.index_url] + options.extra_index_urls
if options.no_index:
logger.notify('Ignoring indexes: %s' % ','.join(index_urls))
logger.info('Ignoring indexes: %s', ','.join(index_urls))
index_urls = []
if options.use_mirrors:
logger.deprecated(
"1.7",
"--use-mirrors has been deprecated and will be removed"
" in the future. Explicit uses of --index-url and/or "
"--extra-index-url is suggested."
warnings.warn(
"--use-mirrors has been deprecated and will be removed in the "
"future. Explicit uses of --index-url and/or --extra-index-url"
" is suggested.",
RemovedInPip17Warning,
)
if options.mirrors:
logger.deprecated(
"1.7",
"--mirrors has been deprecated and will be removed in "
" the future. Explicit uses of --index-url and/or "
"--extra-index-url is suggested."
warnings.warn(
"--mirrors has been deprecated and will be removed in the "
"future. Explicit uses of --index-url and/or --extra-index-url"
" is suggested.",
RemovedInPip17Warning,
)
index_urls += options.mirrors
if options.download_cache:
logger.deprecated(
"1.8",
warnings.warn(
"--download-cache has been deprecated and will be removed in "
" the future. Pip now automatically uses and configures its "
"cache."
"the future. Pip now automatically uses and configures its "
"cache.",
RemovedInPip18Warning,
)
with self._build_session(options) as session:
@ -299,7 +305,7 @@ class InstallCommand(Command):
else:
msg = ('You must give at least one requirement '
'to %(name)s (see "pip help %(name)s")' % opts)
logger.warn(msg)
logger.warning(msg)
return
try:
@ -319,16 +325,14 @@ class InstallCommand(Command):
requirement_set.successfully_installed
])
if installed:
logger.notify('Successfully installed %s' % installed)
logger.info('Successfully installed %s', installed)
else:
downloaded = ' '.join([
req.name
for req in requirement_set.successfully_downloaded
])
if downloaded:
logger.notify(
'Successfully downloaded %s' % downloaded
)
logger.info('Successfully downloaded %s', downloaded)
except PreviousBuildDirError:
options.no_clean = True
raise

View File

@ -1,12 +1,20 @@
from __future__ import absolute_import
import logging
import warnings
from pip.basecommand import Command
from pip.exceptions import DistributionNotFound, BestVersionAlreadyInstalled
from pip.index import PackageFinder
from pip.log import logger
from pip.req import InstallRequirement
from pip.util import get_installed_distributions, dist_is_editable
from pip.utils import get_installed_distributions, dist_is_editable
from pip.utils.deprecation import RemovedInPip17Warning
from pip.cmdoptions import make_option_group, index_group
logger = logging.getLogger(__name__)
class ListCommand(Command):
"""
List installed packages, including editables.
@ -88,31 +96,31 @@ class ListCommand(Command):
for dist, remote_version_raw, remote_version_parsed in \
self.find_packages_latests_versions(options):
if remote_version_parsed > dist.parsed_version:
logger.notify(
'%s (Current: %s Latest: %s)' %
(dist.project_name, dist.version, remote_version_raw)
logger.info(
'%s (Current: %s Latest: %s)',
dist.project_name, dist.version, remote_version_raw,
)
def find_packages_latests_versions(self, options):
index_urls = [options.index_url] + options.extra_index_urls
if options.no_index:
logger.notify('Ignoring indexes: %s' % ','.join(index_urls))
logger.info('Ignoring indexes: %s', ','.join(index_urls))
index_urls = []
if options.use_mirrors:
logger.deprecated(
"1.7",
"--use-mirrors has been deprecated and will be removed"
" in the future. Explicit uses of --index-url and/or "
"--extra-index-url is suggested."
warnings.warn(
"--use-mirrors has been deprecated and will be removed in the "
"future. Explicit uses of --index-url and/or --extra-index-url"
" is suggested.",
RemovedInPip17Warning,
)
if options.mirrors:
logger.deprecated(
"1.7",
"--mirrors has been deprecated and will be removed in "
" the future. Explicit uses of --index-url and/or "
"--extra-index-url is suggested."
warnings.warn(
"--mirrors has been deprecated and will be removed in the "
"future. Explicit uses of --index-url and/or --extra-index-url"
" is suggested.",
RemovedInPip17Warning,
)
index_urls += options.mirrors
@ -181,7 +189,7 @@ class ListCommand(Command):
)
else:
line = '%s (%s)' % (dist.project_name, dist.version)
logger.notify(line)
logger.info(line)
def run_uptodate(self, options):
uptodate = []

View File

@ -1,3 +1,6 @@
from __future__ import absolute_import
import logging
import sys
import textwrap
@ -6,8 +9,8 @@ from functools import reduce
from pip.basecommand import Command, SUCCESS
from pip.download import PipXmlrpcTransport
from pip.util import get_terminal_size
from pip.log import logger
from pip.utils import get_terminal_size
from pip.utils.logging import indent_log
from pip.compat import cmp
from pip.exceptions import CommandError
from pip.status_codes import NO_MATCHES_FOUND
@ -15,6 +18,9 @@ from pip._vendor import pkg_resources
from pip._vendor.six.moves import xmlrpc_client
logger = logging.getLogger(__name__)
class SearchCommand(Command):
"""Search for PyPI packages whose name or summary contains <query>."""
name = 'search'
@ -112,19 +118,16 @@ def print_results(hits, name_column_width=25, terminal_width=None):
summary = ('\n' + ' ' * (name_column_width + 3)).join(summary)
line = '%s - %s' % (name.ljust(name_column_width), summary)
try:
logger.notify(line)
logger.info(line)
if name in installed_packages:
dist = pkg_resources.get_distribution(name)
logger.indent += 2
try:
with indent_log():
latest = highest_version(hit['versions'])
if dist.version == latest:
logger.notify('INSTALLED: %s (latest)' % dist.version)
logger.info('INSTALLED: %s (latest)', dist.version)
else:
logger.notify('INSTALLED: %s' % dist.version)
logger.notify('LATEST: %s' % latest)
finally:
logger.indent -= 2
logger.info('INSTALLED: %s', dist.version)
logger.info('LATEST: %s', latest)
except UnicodeEncodeError:
pass

View File

@ -1,10 +1,15 @@
from __future__ import absolute_import
import logging
import os
from pip.basecommand import Command
from pip.log import logger
from pip._vendor import pkg_resources
logger = logging.getLogger(__name__)
class ShowCommand(Command):
"""Show information about one or more installed packages."""
name = 'show'
@ -25,7 +30,7 @@ class ShowCommand(Command):
def run(self, options, args):
if not args:
logger.warn('ERROR: Please provide a package name or names.')
logger.warning('ERROR: Please provide a package name or names.')
return
query = args
@ -75,15 +80,15 @@ def print_results(distributions, list_all_files):
Print the informations from installed distributions found.
"""
for dist in distributions:
logger.notify("---")
logger.notify("Name: %s" % dist['name'])
logger.notify("Version: %s" % dist['version'])
logger.notify("Location: %s" % dist['location'])
logger.notify("Requires: %s" % ', '.join(dist['requires']))
logger.info("---")
logger.info("Name: %s" % dist['name'])
logger.info("Version: %s" % dist['version'])
logger.info("Location: %s" % dist['location'])
logger.info("Requires: %s" % ', '.join(dist['requires']))
if list_all_files:
logger.notify("Files:")
logger.info("Files:")
if dist['files'] is not None:
for line in dist['files']:
logger.notify(" %s" % line.strip())
logger.info(" %s" % line.strip())
else:
logger.notify("Cannot locate installed-files.txt")
logger.info("Cannot locate installed-files.txt")

View File

@ -1,3 +1,5 @@
from __future__ import absolute_import
from pip.req import InstallRequirement, RequirementSet, parse_requirements
from pip.basecommand import Command
from pip.exceptions import InstallationError

View File

@ -1,3 +1,5 @@
from __future__ import absolute_import
from pip.commands.zip import ZipCommand

View File

@ -1,20 +1,25 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import logging
import os
import warnings
from pip.basecommand import Command
from pip.index import PackageFinder
from pip.log import logger
from pip.exceptions import CommandError, PreviousBuildDirError
from pip.req import InstallRequirement, RequirementSet, parse_requirements
from pip.util import normalize_path
from pip.utils import normalize_path
from pip.utils.deprecation import RemovedInPip17Warning, RemovedInPip18Warning
from pip.wheel import WheelBuilder
from pip import cmdoptions
DEFAULT_WHEEL_DIR = os.path.join(normalize_path(os.curdir), 'wheelhouse')
logger = logging.getLogger(__name__)
class WheelCommand(Command):
"""
Build Wheel archives for your requirements and dependencies.
@ -124,32 +129,32 @@ class WheelCommand(Command):
index_urls = [options.index_url] + options.extra_index_urls
if options.no_index:
logger.notify('Ignoring indexes: %s' % ','.join(index_urls))
logger.info('Ignoring indexes: %s', ','.join(index_urls))
index_urls = []
if options.use_mirrors:
logger.deprecated(
"1.7",
"--use-mirrors has been deprecated and will be removed"
" in the future. Explicit uses of --index-url and/or "
"--extra-index-url is suggested."
warnings.warn(
"--use-mirrors has been deprecated and will be removed in the "
"future. Explicit uses of --index-url and/or --extra-index-url"
" is suggested.",
RemovedInPip17Warning,
)
if options.mirrors:
logger.deprecated(
"1.7",
"--mirrors has been deprecated and will be removed in "
" the future. Explicit uses of --index-url and/or "
"--extra-index-url is suggested."
warnings.warn(
"--mirrors has been deprecated and will be removed in the "
"future. Explicit uses of --index-url and/or --extra-index-url"
" is suggested.",
RemovedInPip17Warning,
)
index_urls += options.mirrors
if options.download_cache:
logger.deprecated(
"1.8",
warnings.warn(
"--download-cache has been deprecated and will be removed in "
" the future. Pip now automatically uses and configures its "
"cache."
"the future. Pip now automatically uses and configures its "
"cache.",
RemovedInPip18Warning,
)
with self._build_session(options) as session:
@ -202,10 +207,11 @@ class WheelCommand(Command):
# fail if no requirements
if not requirement_set.has_requirements:
opts = {'name': self.name}
msg = ('You must give at least one requirement '
'to %(name)s (see "pip help %(name)s")' % opts)
logger.error(msg)
logger.error(
"You must give at least one requirement to %s "
"(see \"pip help %s\")",
self.name,
)
return
try:

View File

@ -1,15 +1,24 @@
from __future__ import absolute_import
import sys
import re
import fnmatch
import logging
import os
import shutil
import warnings
import zipfile
from pip.util import display_path, backup_dir, rmtree
from pip.log import logger
from pip.utils import display_path, backup_dir, rmtree
from pip.utils.deprecation import RemovedInPip17Warning
from pip.utils.logging import indent_log
from pip.exceptions import InstallationError
from pip.basecommand import Command
logger = logging.getLogger(__name__)
class ZipCommand(Command):
"""Zip individual packages."""
name = 'zip'
@ -87,21 +96,27 @@ class ZipCommand(Command):
match_any.add(match)
break
else:
logger.debug("Skipping path %s because it doesn't match %s"
% (path, ', '.join(self.select_paths)))
logger.debug(
"Skipping path %s because it doesn't match %s",
path,
', '.join(self.select_paths),
)
for match in self.select_paths:
if match not in match_any and '*' not in match:
result.append(match)
logger.debug("Adding path %s because it doesn't match "
"anything already on sys.path" % match)
logger.debug(
"Adding path %s because it doesn't match "
"anything already on sys.path",
match,
)
return result
def run(self, options, args):
logger.deprecated(
'1.7',
"DEPRECATION: 'pip zip' and 'pip unzip` are deprecated, and will "
"be removed in a future release."
warnings.warn(
"'pip zip' and 'pip unzip` are deprecated, and will be removed in "
"a future release.",
RemovedInPip17Warning,
)
self.select_paths = options.paths
@ -144,20 +159,22 @@ class ZipCommand(Command):
% (module_name, filename, zip_filename))
package_path = os.path.dirname(zip_filename)
if package_path not in self.paths():
logger.warn(
'Unpacking %s into %s, but %s is not on sys.path'
% (display_path(zip_filename), display_path(package_path),
display_path(package_path)))
logger.notify(
'Unzipping %s (in %s)' % (module_name, display_path(zip_filename))
logger.warning(
'Unpacking %s into %s, but %s is not on sys.path',
display_path(zip_filename),
display_path(package_path),
display_path(package_path),
)
logger.info(
'Unzipping %s (in %s)', module_name, display_path(zip_filename),
)
if self.simulate:
logger.notify(
logger.info(
'Skipping remaining operations because of --simulate'
)
return
logger.indent += 2
try:
with indent_log():
# FIXME: this should be undoable:
zip = zipfile.ZipFile(zip_filename)
to_save = []
@ -179,44 +196,44 @@ class ZipCommand(Command):
to_save.append((name, zip.read(name)))
zip.close()
if not to_save:
logger.info(
'Removing now-empty zip file %s' %
logger.debug(
'Removing now-empty zip file %s',
display_path(zip_filename)
)
os.unlink(zip_filename)
self.remove_filename_from_pth(zip_filename)
else:
logger.info(
'Removing entries in %s/ from zip file %s' %
(module_name, display_path(zip_filename))
logger.debug(
'Removing entries in %s/ from zip file %s',
module_name,
display_path(zip_filename),
)
zip = zipfile.ZipFile(zip_filename, 'w')
for name, content in to_save:
zip.writestr(name, content)
zip.close()
finally:
logger.indent -= 2
def zip_package(self, module_name, filename, no_pyc):
logger.info('Zip %s (in %s)', module_name, display_path(filename))
orig_filename = filename
logger.notify('Zip %s (in %s)' % (module_name, display_path(filename)))
logger.indent += 2
if filename.endswith('.egg'):
dest_filename = filename
else:
dest_filename = filename + '.zip'
try:
with indent_log():
# FIXME: I think this needs to be undoable:
if filename == dest_filename:
filename = backup_dir(orig_filename)
logger.notify(
'Moving %s aside to %s' % (orig_filename, filename)
logger.info(
'Moving %s aside to %s', orig_filename, filename,
)
if not self.simulate:
shutil.move(orig_filename, filename)
try:
logger.info(
'Creating zip file in %s' % display_path(dest_filename)
logger.debug(
'Creating zip file in %s', display_path(dest_filename),
)
if not self.simulate:
zip = zipfile.ZipFile(dest_filename, 'w')
@ -241,8 +258,8 @@ class ZipCommand(Command):
else:
zip.write(full, dest)
zip.close()
logger.info(
'Removing old directory %s' % display_path(filename)
logger.debug(
'Removing old directory %s', display_path(filename),
)
if not self.simulate:
rmtree(filename)
@ -251,8 +268,6 @@ class ZipCommand(Command):
raise
# FIXME: should also be undone:
self.add_filename_to_pth(dest_filename)
finally:
logger.indent -= 2
def remove_filename_from_pth(self, filename):
for pth in self.pth_files():
@ -262,11 +277,14 @@ class ZipCommand(Command):
new_lines = [
l for l in lines if l.strip() != filename]
if lines != new_lines:
logger.info('Removing reference to %s from .pth file %s'
% (display_path(filename), display_path(pth)))
logger.debug(
'Removing reference to %s from .pth file %s',
display_path(filename),
display_path(pth),
)
if not [line for line in new_lines if line]:
logger.info(
'%s file would be empty: deleting' % display_path(pth)
logger.debug(
'%s file would be empty: deleting', display_path(pth)
)
if not self.simulate:
os.unlink(pth)
@ -276,18 +294,18 @@ class ZipCommand(Command):
f.writelines(new_lines)
f.close()
return
logger.warn(
'Cannot find a reference to %s in any .pth file' %
display_path(filename)
logger.warning(
'Cannot find a reference to %s in any .pth file',
display_path(filename),
)
def add_filename_to_pth(self, filename):
path = os.path.dirname(filename)
dest = filename + '.pth'
if path not in self.paths():
logger.warn(
'Adding .pth file %s, but it is not on sys.path' %
display_path(dest)
logger.warning(
'Adding .pth file %s, but it is not on sys.path',
display_path(dest),
)
if not self.simulate:
if os.path.exists(dest):
@ -340,24 +358,25 @@ class ZipCommand(Command):
basename = os.path.basename(path.rstrip(os.path.sep))
if os.path.isfile(path) and zipfile.is_zipfile(path):
if os.path.dirname(path) not in self.paths():
logger.notify('Zipped egg: %s' % display_path(path))
logger.info('Zipped egg: %s', display_path(path))
continue
if (basename != 'site-packages'
and basename != 'dist-packages'
and not path.replace('\\', '/').endswith('lib/python')):
continue
logger.notify('In %s:' % display_path(path))
logger.indent += 2
zipped = []
unzipped = []
try:
logger.info('In %s:', display_path(path))
with indent_log():
zipped = []
unzipped = []
for filename in sorted(os.listdir(path)):
ext = os.path.splitext(filename)[1].lower()
if ext in ('.pth', '.egg-info', '.egg-link'):
continue
if ext == '.py':
logger.info(
'Not displaying %s: not a package' %
logger.debug(
'Not displaying %s: not a package',
display_path(filename)
)
continue
@ -367,33 +386,25 @@ class ZipCommand(Command):
elif zipfile.is_zipfile(full):
zipped.append(filename)
else:
logger.info(
'Unknown file: %s' % display_path(filename)
logger.debug(
'Unknown file: %s', display_path(filename),
)
if zipped:
logger.notify('Zipped packages:')
logger.indent += 2
try:
logger.info('Zipped packages:')
with indent_log():
for filename in zipped:
logger.notify(filename)
finally:
logger.indent -= 2
logger.info(filename)
else:
logger.notify('No zipped packages.')
logger.info('No zipped packages.')
if unzipped:
if options.sort_files:
unzipped.sort(key=lambda x: -x[1])
logger.notify('Unzipped packages:')
logger.indent += 2
try:
logger.info('Unzipped packages:')
with indent_log():
for filename, count in unzipped:
logger.notify('%s (%i files)' % (filename, count))
finally:
logger.indent -= 2
logger.info('%s (%i files)', filename, count)
else:
logger.notify('No unzipped packages.')
finally:
logger.indent -= 2
logger.info('No unzipped packages.')
def count_package(self, path):
total = 0

View File

@ -1,5 +1,6 @@
"""Stuff that differs in different Python versions and platform
distributions."""
from __future__ import absolute_import
# flake8: noqa
@ -30,6 +31,12 @@ except NameError:
console_encoding = sys.__stdout__.encoding
try:
from logging.config import dictConfig as logging_dictConfig
except ImportError:
from pip.compat.dictconfig import dictConfig as logging_dictConfig
if sys.version_info >= (3,):
from io import StringIO
from urllib.error import URLError, HTTPError

565
pip/compat/dictconfig.py Normal file
View File

@ -0,0 +1,565 @@
# This is a copy of the Python logging.config.dictconfig module,
# reproduced with permission. It is provided here for backwards
# compatibility for Python versions prior to 2.7.
#
# Copyright 2009-2010 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appear in all copies and that
# both that copyright notice and this permission notice appear in
# supporting documentation, and that the name of Vinay Sajip
# not be used in advertising or publicity pertaining to distribution
# of the software without specific, written prior permission.
# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from __future__ import absolute_import
import logging.handlers
import re
import sys
import types
from pip._vendor import six
# flake8: noqa
# Pattern for a legal Python identifier (ASCII, case-insensitive).
IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I)


def valid_ident(s):
    """Return True if *s* is a valid Python identifier, else raise ValueError."""
    if IDENTIFIER.match(s) is None:
        raise ValueError('Not a valid Python identifier: %r' % s)
    return True
#
# This function is defined in logging only in recent versions of Python
#
try:
from logging import _checkLevel
except ImportError:
def _checkLevel(level):
if isinstance(level, int):
rv = level
elif str(level) == level:
if level not in logging._levelNames:
raise ValueError('Unknown level: %r' % level)
rv = logging._levelNames[level]
else:
raise TypeError('Level not an integer or a '
'valid string: %r' % level)
return rv
# The ConvertingXXX classes are wrappers around standard Python containers,
# and they serve to convert any suitable values in the container. The
# conversion converts base dicts, lists and tuples to their wrapped
# equivalents, whereas strings which match a conversion format are converted
# appropriately.
#
# Each wrapper should have a configurator attribute holding the actual
# configurator to use for conversion.
class ConvertingDict(dict):
    """A converting dictionary wrapper."""

    def _convert_and_track(self, key, value, save):
        # Run *value* through the configurator; when it changes, optionally
        # write the converted form back under *key* and record parent/key on
        # converting containers so cfg:// lookups can walk back up.
        converted = self.configurator.convert(value)
        if value is not converted:
            if save:
                self[key] = converted
            if type(converted) in (ConvertingDict, ConvertingList,
                                   ConvertingTuple):
                converted.parent = self
                converted.key = key
        return converted

    def __getitem__(self, key):
        return self._convert_and_track(key, dict.__getitem__(self, key), True)

    def get(self, key, default=None):
        return self._convert_and_track(key, dict.get(self, key, default), True)

    def pop(self, key, default=None):
        # Popped values are converted but never written back (the key is gone).
        return self._convert_and_track(key, dict.pop(self, key, default), False)
class ConvertingList(list):
    """A converting list wrapper."""

    def __getitem__(self, key):
        raw = list.__getitem__(self, key)
        converted = self.configurator.convert(raw)
        if converted is not raw:
            # Cache the converted value so later reads skip conversion.
            self[key] = converted
            if type(converted) in (ConvertingDict, ConvertingList,
                                   ConvertingTuple):
                converted.parent = self
                converted.key = key
        return converted

    def pop(self, idx=-1):
        # Popped items are converted but not cached; only the parent link is
        # recorded on converting containers (no key — the slot is gone).
        raw = list.pop(self, idx)
        converted = self.configurator.convert(raw)
        if converted is not raw and type(converted) in (
                ConvertingDict, ConvertingList, ConvertingTuple):
            converted.parent = self
        return converted
class ConvertingTuple(tuple):
    """A converting tuple wrapper."""

    def __getitem__(self, key):
        raw = tuple.__getitem__(self, key)
        converted = self.configurator.convert(raw)
        # Tuples are immutable, so the converted value is never written back;
        # only parent/key links are recorded on converting containers.
        if converted is not raw:
            if type(converted) in (ConvertingDict, ConvertingList,
                                   ConvertingTuple):
                converted.parent = self
                converted.key = key
        return converted
class BaseConfigurator(object):
    """
    The configurator base class which defines some useful defaults.

    Holds the config as a ConvertingDict and implements the ``ext://``
    (external object import) and ``cfg://`` (intra-config reference) value
    protocols used while resolving a logging configuration dictionary.
    """

    # Matches protocol-prefixed values like "ext://sys.stderr".
    CONVERT_PATTERN = re.compile(r'^(?P<prefix>[a-z]+)://(?P<suffix>.*)$')

    # Tokenizer pieces for cfg:// paths: a leading word, then ".attr" and
    # "[index]" accessors.
    WORD_PATTERN = re.compile(r'^\s*(\w+)\s*')
    DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*')
    INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*')
    DIGIT_PATTERN = re.compile(r'^\d+$')

    # Maps protocol prefix -> name of the converter method on this class.
    value_converters = {
        'ext' : 'ext_convert',
        'cfg' : 'cfg_convert',
    }

    # We might want to use a different one, e.g. importlib
    importer = __import__

    def __init__(self, config):
        # Wrap the config so nested values are converted lazily on access.
        self.config = ConvertingDict(config)
        self.config.configurator = self

    def resolve(self, s):
        """
        Resolve strings to objects using standard import and attribute
        syntax.

        Raises ValueError (with the original exception chained in) if the
        dotted path cannot be imported/resolved.
        """
        name = s.split('.')
        used = name.pop(0)
        try:
            found = self.importer(used)
            for frag in name:
                used += '.' + frag
                try:
                    found = getattr(found, frag)
                except AttributeError:
                    # The fragment may be a submodule not yet imported;
                    # import it and retry the attribute lookup.
                    self.importer(used)
                    found = getattr(found, frag)
            return found
        except ImportError:
            e, tb = sys.exc_info()[1:]
            v = ValueError('Cannot resolve %r: %s' % (s, e))
            v.__cause__, v.__traceback__ = e, tb
            raise v

    def ext_convert(self, value):
        """Default converter for the ext:// protocol."""
        return self.resolve(value)

    def cfg_convert(self, value):
        """Default converter for the cfg:// protocol.

        Walks a path like ``handlers.console[level]`` through self.config,
        consuming ".word" and "[index]" accessors until the path is empty.
        """
        rest = value
        m = self.WORD_PATTERN.match(rest)
        if m is None:
            raise ValueError("Unable to convert %r" % value)
        else:
            rest = rest[m.end():]
            d = self.config[m.groups()[0]]
            while rest:
                m = self.DOT_PATTERN.match(rest)
                if m:
                    d = d[m.groups()[0]]
                else:
                    m = self.INDEX_PATTERN.match(rest)
                    if m:
                        idx = m.groups()[0]
                        if not self.DIGIT_PATTERN.match(idx):
                            d = d[idx]
                        else:
                            try:
                                n = int(idx)  # try as number first (most likely)
                                d = d[n]
                            except TypeError:
                                # Container rejects int keys; fall back to str.
                                d = d[idx]
                if m:
                    rest = rest[m.end():]
                else:
                    raise ValueError('Unable to convert '
                                     '%r at %r' % (value, rest))
        # rest should be empty
        return d

    def convert(self, value):
        """
        Convert values to an appropriate type. dicts, lists and tuples are
        replaced by their converting alternatives. Strings are checked to
        see if they have a conversion format and are converted if they do.
        """
        if not isinstance(value, ConvertingDict) and isinstance(value, dict):
            value = ConvertingDict(value)
            value.configurator = self
        elif not isinstance(value, ConvertingList) and isinstance(value, list):
            value = ConvertingList(value)
            value.configurator = self
        elif not isinstance(value, ConvertingTuple) and\
                isinstance(value, tuple):
            value = ConvertingTuple(value)
            value.configurator = self
        elif isinstance(value, six.string_types):  # str for py3k
            m = self.CONVERT_PATTERN.match(value)
            if m:
                d = m.groupdict()
                prefix = d['prefix']
                converter = self.value_converters.get(prefix, None)
                if converter:
                    suffix = d['suffix']
                    converter = getattr(self, converter)
                    value = converter(suffix)
        return value

    def configure_custom(self, config):
        """Configure an object with a user-supplied factory.

        The '()' key names the factory; remaining keys become keyword
        arguments, and the optional '.' key maps attribute names to values
        set on the result after construction.
        """
        c = config.pop('()')
        if not hasattr(c, '__call__') and hasattr(types, 'ClassType') and type(c) != types.ClassType:
            c = self.resolve(c)
        props = config.pop('.', None)
        # Check for valid identifiers
        kwargs = dict((k, config[k]) for k in config if valid_ident(k))
        result = c(**kwargs)
        if props:
            for name, value in props.items():
                setattr(result, name, value)
        return result

    def as_tuple(self, value):
        """Utility function which converts lists to tuples."""
        if isinstance(value, list):
            value = tuple(value)
        return value
class DictConfigurator(BaseConfigurator):
    """
    Configure logging using a dictionary-like object to describe the
    configuration.

    NOTE: the ``except StandardError`` clauses below are Python 2 only;
    per the module header this copy exists for Python versions prior to
    2.7, so that name is available wherever this code actually runs.
    """

    def configure(self):
        """Do the configuration.

        Validates the schema version, then configures either incrementally
        (levels/propagation only) or from scratch (formatters, filters,
        handlers, loggers, root — in dependency order) while holding the
        logging module lock.
        """

        config = self.config
        if 'version' not in config:
            raise ValueError("dictionary doesn't specify a version")
        if config['version'] != 1:
            raise ValueError("Unsupported version: %s" % config['version'])
        incremental = config.pop('incremental', False)
        EMPTY_DICT = {}
        # Serialize against other logging configuration activity.
        logging._acquireLock()
        try:
            if incremental:
                handlers = config.get('handlers', EMPTY_DICT)
                # incremental handler config only if handler name
                # ties in to logging._handlers (Python 2.7)
                if sys.version_info[:2] == (2, 7):
                    for name in handlers:
                        if name not in logging._handlers:
                            raise ValueError('No handler found with '
                                             'name %r' % name)
                        else:
                            try:
                                handler = logging._handlers[name]
                                handler_config = handlers[name]
                                level = handler_config.get('level', None)
                                if level:
                                    handler.setLevel(_checkLevel(level))
                            except StandardError as e:
                                raise ValueError('Unable to configure handler '
                                                 '%r: %s' % (name, e))
                loggers = config.get('loggers', EMPTY_DICT)
                for name in loggers:
                    try:
                        self.configure_logger(name, loggers[name], True)
                    except StandardError as e:
                        raise ValueError('Unable to configure logger '
                                         '%r: %s' % (name, e))
                root = config.get('root', None)
                if root:
                    try:
                        self.configure_root(root, True)
                    except StandardError as e:
                        raise ValueError('Unable to configure root '
                                         'logger: %s' % e)
            else:
                disable_existing = config.pop('disable_existing_loggers', True)
                # Full (non-incremental) configure starts from a clean slate.
                logging._handlers.clear()
                del logging._handlerList[:]
                # Do formatters first - they don't refer to anything else
                formatters = config.get('formatters', EMPTY_DICT)
                for name in formatters:
                    try:
                        formatters[name] = self.configure_formatter(
                            formatters[name])
                    except StandardError as e:
                        raise ValueError('Unable to configure '
                                         'formatter %r: %s' % (name, e))
                # Next, do filters - they don't refer to anything else, either
                filters = config.get('filters', EMPTY_DICT)
                for name in filters:
                    try:
                        filters[name] = self.configure_filter(filters[name])
                    except StandardError as e:
                        raise ValueError('Unable to configure '
                                         'filter %r: %s' % (name, e))
                # Next, do handlers - they refer to formatters and filters
                # As handlers can refer to other handlers, sort the keys
                # to allow a deterministic order of configuration
                handlers = config.get('handlers', EMPTY_DICT)
                for name in sorted(handlers):
                    try:
                        handler = self.configure_handler(handlers[name])
                        handler.name = name
                        handlers[name] = handler
                    except StandardError as e:
                        raise ValueError('Unable to configure handler '
                                         '%r: %s' % (name, e))
                # Next, do loggers - they refer to handlers and filters

                # we don't want to lose the existing loggers,
                # since other threads may have pointers to them.
                # existing is set to contain all existing loggers,
                # and as we go through the new configuration we
                # remove any which are configured. At the end,
                # what's left in existing is the set of loggers
                # which were in the previous configuration but
                # which are not in the new configuration.
                root = logging.root
                existing = list(root.manager.loggerDict)
                # The list needs to be sorted so that we can
                # avoid disabling child loggers of explicitly
                # named loggers. With a sorted list it is easier
                # to find the child loggers.
                existing.sort()
                # We'll keep the list of existing loggers
                # which are children of named loggers here...
                child_loggers = []
                # now set up the new ones...
                loggers = config.get('loggers', EMPTY_DICT)
                for name in loggers:
                    if name in existing:
                        i = existing.index(name)
                        prefixed = name + "."
                        pflen = len(prefixed)
                        num_existing = len(existing)
                        i = i + 1  # look at the entry after name
                        while (i < num_existing) and\
                                (existing[i][:pflen] == prefixed):
                            child_loggers.append(existing[i])
                            i = i + 1
                        existing.remove(name)
                    try:
                        self.configure_logger(name, loggers[name])
                    except StandardError as e:
                        raise ValueError('Unable to configure logger '
                                         '%r: %s' % (name, e))
                # Disable any old loggers. There's no point deleting
                # them as other threads may continue to hold references
                # and by disabling them, you stop them doing any logging.
                # However, don't disable children of named loggers, as that's
                # probably not what was intended by the user.
                for log in existing:
                    logger = root.manager.loggerDict[log]
                    if log in child_loggers:
                        # Child of a configured logger: reset to delegate
                        # upward rather than disabling it outright.
                        logger.level = logging.NOTSET
                        logger.handlers = []
                        logger.propagate = True
                    elif disable_existing:
                        logger.disabled = True
                # And finally, do the root logger
                root = config.get('root', None)
                if root:
                    try:
                        self.configure_root(root)
                    except StandardError as e:
                        raise ValueError('Unable to configure root '
                                         'logger: %s' % e)
        finally:
            logging._releaseLock()

    def configure_formatter(self, config):
        """Configure a formatter from a dictionary.

        Supports a custom '()' factory, falling back to logging.Formatter
        with the 'format'/'datefmt' keys.
        """
        if '()' in config:
            factory = config['()']  # for use in exception handler
            try:
                result = self.configure_custom(config)
            except TypeError as te:
                if "'format'" not in str(te):
                    raise
                # Name of parameter changed from fmt to format.
                # Retry with old name.
                # This is so that code can be used with older Python versions
                # (e.g. by Django)
                config['fmt'] = config.pop('format')
                config['()'] = factory
                result = self.configure_custom(config)
        else:
            fmt = config.get('format', None)
            dfmt = config.get('datefmt', None)
            result = logging.Formatter(fmt, dfmt)
        return result

    def configure_filter(self, config):
        """Configure a filter from a dictionary."""
        if '()' in config:
            result = self.configure_custom(config)
        else:
            name = config.get('name', '')
            result = logging.Filter(name)
        return result

    def add_filters(self, filterer, filters):
        """Add filters to a filterer from a list of names."""
        for f in filters:
            try:
                filterer.addFilter(self.config['filters'][f])
            except StandardError as e:
                raise ValueError('Unable to add filter %r: %s' % (f, e))

    def configure_handler(self, config):
        """Configure a handler from a dictionary.

        Resolves the handler class (or '()' factory), wires in its
        formatter/filters/level, and special-cases handlers whose kwargs
        refer to other handlers or need tuple coercion.
        """
        formatter = config.pop('formatter', None)
        if formatter:
            try:
                formatter = self.config['formatters'][formatter]
            except StandardError as e:
                raise ValueError('Unable to set formatter '
                                 '%r: %s' % (formatter, e))
        level = config.pop('level', None)
        filters = config.pop('filters', None)
        if '()' in config:
            c = config.pop('()')
            if not hasattr(c, '__call__') and hasattr(types, 'ClassType') and type(c) != types.ClassType:
                c = self.resolve(c)
            factory = c
        else:
            klass = self.resolve(config.pop('class'))
            # Special case for handler which refers to another handler
            if issubclass(klass, logging.handlers.MemoryHandler) and\
                    'target' in config:
                try:
                    config['target'] = self.config['handlers'][config['target']]
                except StandardError as e:
                    raise ValueError('Unable to set target handler '
                                     '%r: %s' % (config['target'], e))
            elif issubclass(klass, logging.handlers.SMTPHandler) and\
                    'mailhost' in config:
                config['mailhost'] = self.as_tuple(config['mailhost'])
            elif issubclass(klass, logging.handlers.SysLogHandler) and\
                    'address' in config:
                config['address'] = self.as_tuple(config['address'])
            factory = klass
        kwargs = dict((k, config[k]) for k in config if valid_ident(k))
        try:
            result = factory(**kwargs)
        except TypeError as te:
            if "'stream'" not in str(te):
                raise
            # The argument name changed from strm to stream
            # Retry with old name.
            # This is so that code can be used with older Python versions
            # (e.g. by Django)
            kwargs['strm'] = kwargs.pop('stream')
            result = factory(**kwargs)
        if formatter:
            result.setFormatter(formatter)
        if level is not None:
            result.setLevel(_checkLevel(level))
        if filters:
            self.add_filters(result, filters)
        return result

    def add_handlers(self, logger, handlers):
        """Add handlers to a logger from a list of names."""
        for h in handlers:
            try:
                logger.addHandler(self.config['handlers'][h])
            except StandardError as e:
                raise ValueError('Unable to add handler %r: %s' % (h, e))

    def common_logger_config(self, logger, config, incremental=False):
        """
        Perform configuration which is common to root and non-root loggers.

        Sets the level; unless *incremental*, also replaces handlers and
        filters wholesale.
        """
        level = config.get('level', None)
        if level is not None:
            logger.setLevel(_checkLevel(level))
        if not incremental:
            # Remove any existing handlers
            for h in logger.handlers[:]:
                logger.removeHandler(h)
            handlers = config.get('handlers', None)
            if handlers:
                self.add_handlers(logger, handlers)
            filters = config.get('filters', None)
            if filters:
                self.add_filters(logger, filters)

    def configure_logger(self, name, config, incremental=False):
        """Configure a non-root logger from a dictionary."""
        logger = logging.getLogger(name)
        self.common_logger_config(logger, config, incremental)
        propagate = config.get('propagate', None)
        if propagate is not None:
            logger.propagate = propagate

    def configure_root(self, config, incremental=False):
        """Configure a root logger from a dictionary."""
        root = logging.getLogger()
        self.common_logger_config(root, config, incremental)
# Hook allowing callers to substitute their own configurator class.
dictConfigClass = DictConfigurator


def dictConfig(config):
    """Configure logging using a dictionary."""
    configurator = dictConfigClass(config)
    configurator.configure()

View File

@ -1,8 +1,11 @@
from __future__ import absolute_import
import cgi
import email.utils
import hashlib
import getpass
import json
import logging
import mimetypes
import os
import platform
@ -15,11 +18,11 @@ import pip
from pip.compat import urllib, urlparse
from pip.exceptions import InstallationError, HashMismatch
from pip.util import (splitext, rmtree, format_size, display_path,
backup_dir, ask_path_exists, unpack_file)
from pip.utils import (splitext, rmtree, format_size, display_path,
backup_dir, ask_path_exists, unpack_file)
from pip.utils.ui import DownloadProgressBar, DownloadProgressSpinner
from pip.locations import write_delete_marker_file
from pip.vcs import vcs
from pip.log import logger
from pip._vendor import requests, six
from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter
from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
@ -40,6 +43,9 @@ __all__ = ['get_file_content',
'unpack_http_url', 'unpack_url']
logger = logging.getLogger(__name__)
def user_agent():
"""
Return a string representing the user agent.
@ -408,16 +414,16 @@ def is_file_url(link):
def _check_hash(download_hash, link):
if download_hash.digest_size != hashlib.new(link.hash_name).digest_size:
logger.fatal(
logger.critical(
"Hash digest size of the package %d (%s) doesn't match the "
"expected hash name %s!" %
(download_hash.digest_size, link, link.hash_name)
"expected hash name %s!",
download_hash.digest_size, link, link.hash_name,
)
raise HashMismatch('Hash name mismatch for package %s' % link)
if download_hash.hexdigest() != link.hash:
logger.fatal(
"Hash of the package %s (%s) doesn't match the expected hash %s!" %
(link, download_hash.hexdigest(), link.hash)
logger.critical(
"Hash of the package %s (%s) doesn't match the expected hash %s!",
link, download_hash.hexdigest(), link.hash,
)
raise HashMismatch(
'Bad %s hash for package %s' % (link.hash_name, link)
@ -428,8 +434,8 @@ def _get_hash_from_file(target_file, link):
try:
download_hash = hashlib.new(link.hash_name)
except (ValueError, TypeError):
logger.warn(
"Unsupported hash name %s for package %s" % (link.hash_name, link)
logger.warning(
"Unsupported hash name %s for package %s", link.hash_name, link,
)
return None
@ -450,34 +456,29 @@ def _download_url(resp, link, temp_location):
try:
download_hash = hashlib.new(link.hash_name)
except ValueError:
logger.warn(
"Unsupported hash name %s for package %s" %
(link.hash_name, link)
logger.warning(
"Unsupported hash name %s for package %s",
link.hash_name, link,
)
try:
total_length = int(resp.headers['content-length'])
except (ValueError, KeyError, TypeError):
total_length = 0
downloaded = 0
show_progress = total_length > 40 * 1000 or not total_length
cached_resp = getattr(resp, "from_cache", False)
if cached_resp:
show_progress = False
elif total_length > (40 * 1000):
show_progress = True
elif not total_length:
show_progress = True
else:
show_progress = False
show_url = link.show_url
try:
if show_progress:
# FIXME: the URL can get really long in this message:
if total_length:
logger.start_progress(
'Downloading %s (%s): ' %
(show_url, format_size(total_length))
)
else:
logger.start_progress(
'Downloading %s (unknown size): ' % show_url
)
else:
logger.notify('Downloading %s' % show_url)
logger.info('Downloading from URL %s' % link)
def resp_read(chunk_size):
try:
# Special case for urllib3.
@ -518,26 +519,32 @@ def _download_url(resp, link, temp_location):
break
yield chunk
for chunk in resp_read(4096):
downloaded += len(chunk)
if show_progress:
if not total_length:
logger.show_progress('%s' % format_size(downloaded))
else:
logger.show_progress(
'%3i%% %s' %
(
100 * downloaded / total_length,
format_size(downloaded)
)
)
progress_indicator = lambda x, *a, **k: x
if show_progress: # We don't show progress on cached responses
if total_length:
logger.info(
"Downloading %s (%s)", show_url, format_size(total_length),
)
progress_indicator = DownloadProgressBar(
max=total_length,
).iter
else:
logger.info("Downloading %s", show_url)
progress_indicator = DownloadProgressSpinner().iter
elif cached_resp:
logger.info("Using cached %s", show_url)
else:
logger.info("Downloading %s", show_url)
logger.debug('Downloading from URL %s', link)
for chunk in progress_indicator(resp_read(4096), 4096):
if download_hash is not None:
download_hash.update(chunk)
fp.write(chunk)
fp.close()
finally:
if show_progress:
logger.end_progress('%s downloaded' % format_size(downloaded))
if link.hash and link.hash_name:
_check_hash(download_hash, link)
return download_hash
@ -553,18 +560,19 @@ def _copy_file(filename, location, content_type, link):
if response == 'i':
copy = False
elif response == 'w':
logger.warn('Deleting %s' % display_path(download_location))
logger.warning('Deleting %s', display_path(download_location))
os.remove(download_location)
elif response == 'b':
dest_file = backup_dir(download_location)
logger.warn(
'Backing up %s to %s' %
(display_path(download_location), display_path(dest_file))
logger.warning(
'Backing up %s to %s',
display_path(download_location),
display_path(dest_file),
)
shutil.move(download_location, dest_file)
if copy:
shutil.copy(filename, download_location)
logger.notify('Saved %s' % display_path(download_location))
logger.info('Saved %s', display_path(download_location))
def unpack_http_url(link, location, download_dir=None, session=None):
@ -613,7 +621,7 @@ def unpack_file_url(link, location, download_dir=None):
rmtree(location)
shutil.copytree(link_path, location, symlinks=True)
if download_dir:
logger.notify('Link is a directory, ignoring download_dir')
logger.info('Link is a directory, ignoring download_dir')
return
# if link has a hash, let's confirm it matches
@ -663,8 +671,10 @@ class PipXmlrpcTransport(xmlrpc_client.Transport):
self.verbose = verbose
return self.parse_response(response.raw)
except requests.HTTPError as exc:
logger.fatal("HTTP error {0} while getting {1}".format(
exc.response.status_code, url))
logger.critical(
"HTTP error %s while getting %s",
exc.response.status_code, url,
)
raise
@ -734,8 +744,9 @@ def _download_http_url(link, session, temp_dir):
)
resp.raise_for_status()
except requests.HTTPError as exc:
logger.fatal("HTTP error %s while getting %s" %
(exc.response.status_code, link))
logger.critical(
"HTTP error %s while getting %s", exc.response.status_code, link,
)
raise
content_type = resp.headers.get('content-type', '')
@ -768,15 +779,16 @@ def _check_download_dir(link, download_dir):
download_path = os.path.join(download_dir, link.filename)
if os.path.exists(download_path):
# If already downloaded, does its hash match?
logger.notify('File was already downloaded %s' % download_path)
logger.info('File was already downloaded %s', download_path)
if link.hash:
download_hash = _get_hash_from_file(download_path, link)
try:
_check_hash(download_hash, link)
except HashMismatch:
logger.warn(
logger.warning(
'Previously-downloaded file %s has bad hash, '
're-downloading.' % download_path
're-downloading.',
download_path
)
os.unlink(download_path)
return None

View File

@ -1,4 +1,5 @@
"""Exceptions used throughout package"""
from __future__ import absolute_import
class PipError(Exception):

View File

@ -1,13 +1,17 @@
"""Routines related to PyPI, indexes"""
from __future__ import absolute_import
import logging
import sys
import os
import re
import mimetypes
import posixpath
import warnings
from pip.log import logger
from pip.util import Inf, normalize_name, splitext, is_prerelease
from pip.utils import Inf, normalize_name, splitext, is_prerelease
from pip.utils.deprecation import RemovedInPip17Warning
from pip.utils.logging import indent_log
from pip.exceptions import (
DistributionNotFound, BestVersionAlreadyInstalled, InvalidWheelFilename,
UnsupportedWheel,
@ -29,6 +33,9 @@ INSECURE_SCHEMES = {
}
logger = logging.getLogger(__name__)
class PackageFinder(object):
"""This finds packages.
@ -82,7 +89,6 @@ class PackageFinder(object):
# Do we process dependency links?
self.process_dependency_links = process_dependency_links
self._have_warned_dependency_links = False
# The Session we'll use to make requests
self.session = session
@ -93,13 +99,11 @@ class PackageFinder(object):
# # dependency_links value
# # FIXME: also, we should track comes_from (i.e., use Link)
if self.process_dependency_links:
if not self._have_warned_dependency_links:
logger.deprecated(
"1.6",
"Dependency Links processing has been deprecated with an "
"accelerated time schedule and will be removed in pip 1.6",
)
self._have_warned_dependency_links = True
warnings.warn(
"Dependency Links processing has been deprecated and will be "
"removed in a future release.",
RemovedInPip17Warning,
)
self.dependency_links.extend(links)
def _sort_locations(self, locations):
@ -268,9 +272,9 @@ class PackageFinder(object):
# We explicitly do not trust links that came from dependency_links
locations.extend([Link(url) for url in _ulocations])
logger.debug('URLs to search for versions for %s:' % req)
logger.debug('URLs to search for versions for %s:', req)
for location in locations:
logger.debug('* %s' % location)
logger.debug('* %s', location)
self._warn_about_insecure_transport_scheme(logger, location)
found_versions = []
@ -283,19 +287,16 @@ class PackageFinder(object):
)
page_versions = []
for page in self._get_pages(locations, req):
logger.debug('Analyzing links from page %s' % page.url)
logger.indent += 2
try:
logger.debug('Analyzing links from page %s', page.url)
with indent_log():
page_versions.extend(
self._package_versions(page.links, req.name.lower())
)
finally:
logger.indent -= 2
dependency_versions = list(self._package_versions(
[Link(url) for url in self.dependency_links], req.name.lower()))
if dependency_versions:
logger.info(
'dependency_links found: %s' %
logger.debug(
'dependency_links found: %s',
', '.join([
link.url for p, link, version in dependency_versions
])
@ -310,22 +311,25 @@ class PackageFinder(object):
and not page_versions
and not dependency_versions
and not file_versions):
logger.fatal(
'Could not find any downloads that satisfy the requirement'
' %s' % req
logger.critical(
'Could not find any downloads that satisfy the requirement %s',
req,
)
if self.need_warn_external:
logger.warn(
logger.warning(
"Some externally hosted files were ignored as access to "
"them may be unreliable (use --allow-external %s to "
"allow)." % req.name
"allow).",
req.name,
)
if self.need_warn_unverified:
logger.warn("Some insecure and unverifiable files were ignored"
" (use --allow-unverified %s to allow)." %
req.name)
logger.warning(
"Some insecure and unverifiable files were ignored"
" (use --allow-unverified %s to allow).",
req.name,
)
raise DistributionNotFound(
'No distributions at all found for %s' % req
@ -339,8 +343,8 @@ class PackageFinder(object):
)]
if file_versions:
file_versions.sort(reverse=True)
logger.info(
'Local files found: %s' %
logger.debug(
'Local files found: %s',
', '.join([
url_to_path(link.url)
for _, link, _ in file_versions
@ -352,13 +356,11 @@ class PackageFinder(object):
applicable_versions = []
for (parsed_version, link, version) in all_versions:
if version not in req.req:
logger.info(
"Ignoring link %s, version %s doesn't match %s" %
(
link,
version,
','.join([''.join(s) for s in req.req.specs])
)
logger.debug(
"Ignoring link %s, version %s doesn't match %s",
link,
version,
','.join([''.join(s) for s in req.req.specs]),
)
continue
elif (is_prerelease(version)
@ -366,9 +368,11 @@ class PackageFinder(object):
# If this version isn't the already installed one, then
# ignore it if it's a pre-release.
if link is not INSTALLED_VERSION:
logger.info(
logger.debug(
"Ignoring link %s, version %s is a pre-release (use "
"--pre to allow)." % (link, version)
"--pre to allow).",
link,
version,
)
continue
applicable_versions.append((parsed_version, link, version))
@ -380,80 +384,81 @@ class PackageFinder(object):
])
if not upgrade and existing_applicable:
if applicable_versions[0][1] is INSTALLED_VERSION:
logger.info(
logger.debug(
'Existing installed version (%s) is most up-to-date and '
'satisfies requirement' % req.satisfied_by.version
'satisfies requirement',
req.satisfied_by.version,
)
else:
logger.info(
logger.debug(
'Existing installed version (%s) satisfies requirement '
'(most up-to-date version is %s)' %
(req.satisfied_by.version, applicable_versions[0][2])
'(most up-to-date version is %s)',
req.satisfied_by.version,
applicable_versions[0][2],
)
return None
if not applicable_versions:
logger.fatal(
logger.critical(
'Could not find a version that satisfies the requirement %s '
'(from versions: %s)' %
(
req,
', '.join(
sorted(set([
version
for parsed_version, link, version in all_versions
])))
)
'(from versions: %s)',
req,
', '.join(
sorted(set([
version
for parsed_version, link, version in all_versions
]))),
)
if self.need_warn_external:
logger.warn(
logger.warning(
"Some externally hosted files were ignored as access to "
"them may be unreliable (use --allow-external to allow)."
)
if self.need_warn_unverified:
logger.warn("Some insecure and unverifiable files were ignored"
" (use --allow-unverified %s to allow)." %
req.name)
logger.warning(
"Some insecure and unverifiable files were ignored"
" (use --allow-unverified %s to allow).",
req.name,
)
raise DistributionNotFound(
'No distributions matching the version for %s' % req
)
if applicable_versions[0][1] is INSTALLED_VERSION:
# We have an existing version, and its the best version
logger.info(
logger.debug(
'Installed version (%s) is most up-to-date (past versions: '
'%s)' % (
req.satisfied_by.version,
', '.join([
version for parsed_version, link, version
in applicable_versions[1:]
]) or 'none'))
'%s)',
req.satisfied_by.version,
', '.join([
version for parsed_version, link, version
in applicable_versions[1:]
]) or 'none'),
raise BestVersionAlreadyInstalled
if len(applicable_versions) > 1:
logger.info(
'Using version %s (newest of versions: %s)' %
(
applicable_versions[0][2],
', '.join([
version for parsed_version, link, version
in applicable_versions
])
)
logger.debug(
'Using version %s (newest of versions: %s)',
applicable_versions[0][2],
', '.join([
version for parsed_version, link, version
in applicable_versions
])
)
selected_version = applicable_versions[0][1]
if (selected_version.verifiable is not None
and not selected_version.verifiable):
logger.warn("%s is potentially insecure and "
"unverifiable." % req.name)
logger.warning(
"%s is potentially insecure and unverifiable.", req.name,
)
if selected_version._deprecated_regex:
logger.deprecated(
"1.7",
"%s discovered using a deprecated method of parsing, "
"in the future it will no longer be discovered" % req.name
warnings.warn(
"%s discovered using a deprecated method of parsing, in the "
"future it will no longer be discovered." % req.name,
RemovedInPip17Warning,
)
return selected_version
@ -470,14 +475,14 @@ class PackageFinder(object):
index_url.url += '/'
page = self._get_page(index_url, req)
if page is None:
logger.fatal('Cannot fetch index base URL %s' % index_url)
logger.critical('Cannot fetch index base URL %s', index_url)
return
norm_name = normalize_name(req.url_name)
for link in page.links:
base = posixpath.basename(link.path.rstrip('/'))
if norm_name == normalize_name(base):
logger.notify(
'Real name of requirement %s is %s' % (url_name, base)
logger.debug(
'Real name of requirement %s is %s', url_name, base,
)
return base
return None
@ -508,8 +513,11 @@ class PackageFinder(object):
if (normalized not in self.allow_external
and not self.allow_all_external):
self.need_warn_external = True
logger.debug("Not searching %s for files because external "
"urls are disallowed." % link)
logger.debug(
"Not searching %s for files because external "
"urls are disallowed.",
link,
)
continue
if (link.trusted is not None
@ -518,7 +526,8 @@ class PackageFinder(object):
logger.debug(
"Not searching %s for urls, it is an "
"untrusted link and cannot produce safe or "
"verifiable files." % link
"verifiable files.",
link,
)
self.need_warn_unverified = True
continue
@ -573,7 +582,7 @@ class PackageFinder(object):
egg_info, ext = link.splitext()
if not ext:
if link not in self.logged_links:
logger.debug('Skipping link %s; not a file' % link)
logger.debug('Skipping link %s; not a file', link)
self.logged_links.add(link)
return []
if egg_info.endswith('.tar'):
@ -583,14 +592,15 @@ class PackageFinder(object):
if ext not in self._known_extensions():
if link not in self.logged_links:
logger.debug(
'Skipping link %s; unknown archive format: %s' %
(link, ext)
'Skipping link %s; unknown archive format: %s',
link,
ext,
)
self.logged_links.add(link)
return []
if "macosx10" in link.path and ext == '.zip':
if link not in self.logged_links:
logger.debug('Skipping link %s; macosx10 one' % (link))
logger.debug('Skipping link %s; macosx10 one', link)
self.logged_links.add(link)
return []
if ext == wheel_ext:
@ -598,20 +608,22 @@ class PackageFinder(object):
wheel = Wheel(link.filename)
except InvalidWheelFilename:
logger.debug(
'Skipping %s because the wheel filename is invalid' %
'Skipping %s because the wheel filename is invalid',
link
)
return []
if wheel.name.lower() != search_name.lower():
logger.debug(
'Skipping link %s; wrong project name (not %s)' %
(link, search_name)
'Skipping link %s; wrong project name (not %s)',
link,
search_name,
)
return []
if not wheel.supported():
logger.debug(
'Skipping %s because it is not compatible with this '
'Python' % link
'Python',
link,
)
return []
# This is a dirty hack to prevent installing Binary Wheels from
@ -634,7 +646,8 @@ class PackageFinder(object):
if not wheel.supported(tags=supported_tags_noarch):
logger.debug(
"Skipping %s because it is a pypi-hosted binary "
"Wheel on an unsupported platform" % link
"Wheel on an unsupported platform",
link,
)
return []
version = wheel.version
@ -643,8 +656,9 @@ class PackageFinder(object):
version = self._egg_info_matches(egg_info, search_name, link)
if version is None:
logger.debug(
'Skipping link %s; wrong project name (not %s)' %
(link, search_name)
'Skipping link %s; wrong project name (not %s)',
link,
search_name,
)
return []
@ -655,7 +669,7 @@ class PackageFinder(object):
and not self.allow_all_external):
# We have a link that we are sure is external, so we should skip
# it unless we are allowing externals
logger.debug("Skipping %s because it is externally hosted." % link)
logger.debug("Skipping %s because it is externally hosted.", link)
self.need_warn_external = True
return []
@ -666,8 +680,10 @@ class PackageFinder(object):
# We have a link that we are sure we cannot verify its integrity,
# so we should skip it unless we are allowing unsafe installs
# for this requirement.
logger.debug("Skipping %s because it is an insecure and "
"unverifiable file." % link)
logger.debug(
"Skipping %s because it is an insecure and unverifiable file.",
link,
)
self.need_warn_unverified = True
return []
@ -677,10 +693,10 @@ class PackageFinder(object):
py_version = match.group(1)
if py_version != sys.version[:3]:
logger.debug(
'Skipping %s because Python version is incorrect' % link
'Skipping %s because Python version is incorrect', link
)
return []
logger.debug('Found link %s, version: %s' % (link, version))
logger.debug('Found link %s, version: %s', link, version)
return [(
pkg_resources.parse_version(version),
link,
@ -690,7 +706,7 @@ class PackageFinder(object):
def _egg_info_matches(self, egg_info, search_name, link):
match = self._egg_info_re.search(egg_info)
if not match:
logger.debug('Could not parse version from link: %s' % link)
logger.debug('Could not parse version from link: %s', link)
return None
name = match.group(0).lower()
# To match the "safe" name that pkg_resources creates:
@ -741,9 +757,7 @@ class HTMLPage(object):
from pip.vcs import VcsSupport
for scheme in VcsSupport.schemes:
if url.lower().startswith(scheme) and url[len(scheme)] in '+:':
logger.debug(
'Cannot look at %(scheme)s URL %(link)s' % locals()
)
logger.debug('Cannot look at %s URL %s', scheme, link)
return None
try:
@ -758,12 +772,13 @@ class HTMLPage(object):
break
else:
logger.debug(
'Skipping page %s because of Content-Type: '
'%s' % (link, content_type)
'Skipping page %s because of Content-Type: %s',
link,
content_type,
)
return
logger.debug('Getting page %s' % url)
logger.debug('Getting page %s', url)
# Tack index.html onto file:// URLs that point to directories
(scheme, netloc, path, params, query, fragment) = \
@ -774,7 +789,7 @@ class HTMLPage(object):
if not url.endswith('/'):
url += '/'
url = urlparse.urljoin(url, 'index.html')
logger.debug(' file: URL is directory, getting %s' % url)
logger.debug(' file: URL is directory, getting %s', url)
resp = session.get(
url,
@ -793,8 +808,9 @@ class HTMLPage(object):
content_type = resp.headers.get('Content-Type', 'unknown')
if not content_type.lower().startswith("text/html"):
logger.debug(
'Skipping page %s because of Content-Type: %s' %
(link, content_type)
'Skipping page %s because of Content-Type: %s',
link,
content_type,
)
return
@ -814,7 +830,7 @@ class HTMLPage(object):
cls._handle_fail(
req, link, reason, url,
level=2,
meth=logger.notify,
meth=logger.info,
)
else:
return inst
@ -822,7 +838,7 @@ class HTMLPage(object):
@staticmethod
def _handle_fail(req, link, reason, url, level=1, meth=None):
if meth is None:
meth = logger.info
meth = logger.debug
meth("Could not fetch URL %s: %s", link, reason)
meth("Will skip URL %s when looking for download links for %s" %

View File

@ -1,4 +1,5 @@
"""Locations where we look for configs, install stuff, etc"""
from __future__ import absolute_import
import getpass
import os
@ -184,7 +185,6 @@ if WINDOWS:
default_storage_dir,
default_config_basename,
)
default_log_file = os.path.join(default_storage_dir, 'pip.log')
else:
bin_py = os.path.join(sys.prefix, 'bin')
bin_user = os.path.join(user_site, 'bin')
@ -194,13 +194,11 @@ else:
default_storage_dir,
default_config_basename,
)
default_log_file = os.path.join(default_storage_dir, 'pip.log')
# Forcing to use /usr/local/bin for standard Mac OS X framework installs
# Also log to ~/Library/Logs/ for use with the Console.app log viewer
if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/':
bin_py = '/usr/local/bin'
default_log_file = os.path.join(user_dir, 'Library/Logs/pip.log')
site_config_files = [
os.path.join(path, default_config_basename)

View File

@ -1,297 +0,0 @@
"""Logging
"""
import sys
import os
import logging
from pip._vendor import colorama, pkg_resources
from pip.compat import WINDOWS
def _color_wrap(*colors):
def wrapped(inp):
return "".join(list(colors) + [inp, colorama.Style.RESET_ALL])
return wrapped
def should_color(consumer, environ, std=(sys.stdout, sys.stderr)):
real_consumer = (
consumer if not isinstance(consumer, colorama.AnsiToWin32)
else consumer.wrapped
)
# If consumer isn't stdout or stderr we shouldn't colorize it
if real_consumer not in std:
return False
# If consumer is a tty we should color it
if hasattr(real_consumer, "isatty") and real_consumer.isatty():
return True
# If we have an ASNI term we should color it
if environ.get("TERM") == "ANSI":
return True
# If anything else we should not color it
return False
def should_warn(current_version, removal_version):
# Our Significant digits on versions is 2, so remove everything but the
# first two places.
current_version = ".".join(current_version.split(".")[:2])
removal_version = ".".join(removal_version.split(".")[:2])
# Our warning threshold is one minor version before removal, so we
# decrement the minor version by one
major, minor = removal_version.split(".")
minor = str(int(minor) - 1)
warn_version = ".".join([major, minor])
# Test if our current_version should be a warn
return (pkg_resources.parse_version(current_version)
< pkg_resources.parse_version(warn_version))
def sanitize(write_content, encoding=sys.getdefaultencoding()):
# Important for certain values of LANG
if sys.version_info[0] == 2:
return write_content
write_content = write_content.encode(encoding, 'replace')
return write_content.decode(encoding)
class Logger(object):
"""
Logging object for use in command-line script. Allows ranges of
levels, to avoid some redundancy of displayed information.
"""
VERBOSE_DEBUG = logging.DEBUG - 1
DEBUG = logging.DEBUG
INFO = logging.INFO
NOTIFY = (logging.INFO + logging.WARN) / 2
WARN = WARNING = logging.WARN
ERROR = logging.ERROR
FATAL = logging.FATAL
LEVELS = [VERBOSE_DEBUG, DEBUG, INFO, NOTIFY, WARN, ERROR, FATAL]
COLORS = {
WARN: _color_wrap(colorama.Fore.YELLOW),
ERROR: _color_wrap(colorama.Fore.RED),
FATAL: _color_wrap(colorama.Fore.RED),
}
def __init__(self):
self.consumers = []
self.indent = 0
self.explicit_levels = False
self.in_progress = None
self.in_progress_hanging = False
def add_consumers(self, *consumers):
for level, consumer in consumers:
# Try to check for duplicate consumers before adding them
for chk_level, chk_consumer in self.consumers:
# Account for coloroma wrapped streams
if isinstance(chk_consumer, colorama.AnsiToWin32):
chk_consumer = chk_consumer.wrapped
if (level, consumer) == (chk_level, chk_consumer):
break
# If we didn't find a duplicate, then add it
else:
# Colorize consumer for Windows
if WINDOWS and hasattr(consumer, 'write'):
consumer = colorama.AnsiToWin32(consumer)
self.consumers.append((level, consumer))
def debug(self, msg, *args, **kw):
self.log(self.DEBUG, msg, *args, **kw)
def info(self, msg, *args, **kw):
self.log(self.INFO, msg, *args, **kw)
def notify(self, msg, *args, **kw):
self.log(self.NOTIFY, msg, *args, **kw)
def warn(self, msg, *args, **kw):
self.log(self.WARN, msg, *args, **kw)
def error(self, msg, *args, **kw):
self.log(self.ERROR, msg, *args, **kw)
def fatal(self, msg, *args, **kw):
self.log(self.FATAL, msg, *args, **kw)
def deprecated(self, removal_version, msg, *args, **kwargs):
"""
Logs deprecation message which is log level WARN if the
``removal_version`` is > 1 minor release away and log level ERROR
otherwise.
removal_version should be the version that the deprecated feature is
expected to be removed in, so something that will not exist in
version 1.7, but will in 1.6 would have a removal_version of 1.7.
"""
from pip import __version__
if should_warn(__version__, removal_version):
self.warn(msg, *args, **kwargs)
else:
self.error(msg, *args, **kwargs)
def log(self, level, msg, *args, **kw):
if args:
if kw:
raise TypeError(
"You may give positional or keyword arguments, not both")
args = args or kw
# render
if args:
rendered = msg % args
else:
rendered = msg
rendered = ' ' * self.indent + rendered
if self.explicit_levels:
# FIXME: should this be a name, not a level number?
rendered = '%02i %s' % (level, rendered)
for consumer_level, consumer in self.consumers:
if self.level_matches(level, consumer_level):
if (self.in_progress_hanging
and consumer in (sys.stdout, sys.stderr)):
self.in_progress_hanging = False
sys.stdout.write('\n')
sys.stdout.flush()
if hasattr(consumer, 'write'):
write_content = rendered + '\n'
if should_color(consumer, os.environ):
# We are printing to stdout or stderr and it supports
# colors so render our text colored
colorizer = self.COLORS.get(level, lambda x: x)
write_content = colorizer(write_content)
consumer.write(sanitize(write_content))
if hasattr(consumer, 'flush'):
consumer.flush()
else:
consumer(rendered)
def _show_progress(self):
"""Should we display download progress?"""
return (self.stdout_level_matches(self.NOTIFY) and sys.stdout.isatty())
def start_progress(self, msg):
assert not self.in_progress, (
"Tried to start_progress(%r) while in_progress %r"
% (msg, self.in_progress))
if self._show_progress():
sys.stdout.write(' ' * self.indent + msg)
sys.stdout.flush()
self.in_progress_hanging = True
else:
self.in_progress_hanging = False
self.in_progress = msg
self.last_message = None
def end_progress(self, msg='done.'):
assert self.in_progress, (
"Tried to end_progress without start_progress")
if self._show_progress():
if not self.in_progress_hanging:
# Some message has been printed out since start_progress
sys.stdout.write('...' + self.in_progress + msg + '\n')
sys.stdout.flush()
else:
# These erase any messages shown with show_progress
# (besides .'s)
logger.show_progress('')
logger.show_progress('')
sys.stdout.write(msg + '\n')
sys.stdout.flush()
self.in_progress = None
self.in_progress_hanging = False
def show_progress(self, message=None):
"""If we are in a progress scope, and no log messages have been
shown, write out another '.'"""
if self.in_progress_hanging:
if message is None:
sys.stdout.write('.')
sys.stdout.flush()
else:
if self.last_message:
padding = ' ' * max(
0,
len(self.last_message) - len(message)
)
else:
padding = ''
sys.stdout.write(
'\r%s%s%s%s' %
(' ' * self.indent, self.in_progress, message, padding)
)
sys.stdout.flush()
self.last_message = message
def stdout_level_matches(self, level):
"""Returns true if a message at this level will go to stdout"""
return self.level_matches(level, self._stdout_level())
def _stdout_level(self):
"""Returns the level that stdout runs at"""
for level, consumer in self.consumers:
if consumer is sys.stdout:
return level
return self.FATAL
def level_matches(self, level, consumer_level):
"""
>>> l = Logger()
>>> l.level_matches(3, 4)
False
>>> l.level_matches(3, 2)
True
>>> l.level_matches(slice(None, 3), 3)
False
>>> l.level_matches(slice(None, 3), 2)
True
>>> l.level_matches(slice(1, 3), 1)
True
>>> l.level_matches(slice(2, 3), 1)
False
"""
if isinstance(level, slice):
start, stop = level.start, level.stop
if start is not None and start > consumer_level:
return False
if stop is not None or stop <= consumer_level:
return False
return True
else:
return level >= consumer_level
@classmethod
def level_for_integer(cls, level):
levels = cls.LEVELS
if level < 0:
return levels[0]
if level >= len(levels):
return levels[-1]
return levels[level]
def move_stdout_to_stderr(self):
to_remove = []
to_add = []
for consumer_level, consumer in self.consumers:
if consumer == sys.stdout:
to_remove.append((consumer_level, consumer))
to_add.append((consumer_level, sys.stderr))
for item in to_remove:
self.consumers.remove(item)
self.consumers.extend(to_add)
logger = Logger()

View File

@ -1,4 +1,5 @@
"""Generate and work with PEP 425 Compatibility Tags."""
from __future__ import absolute_import
import re
import sys

View File

@ -1,3 +1,4 @@
from __future__ import absolute_import
from .req_install import InstallRequirement
from .req_set import RequirementSet, Requirements

View File

@ -1,10 +1,12 @@
from __future__ import absolute_import
import os
import re
from pip.compat import urlparse
from pip.download import get_file_content
from pip.req.req_install import InstallRequirement
from pip.util import normalize_name
from pip.utils import normalize_name
_scheme_re = re.compile(r'^(http|https|file):', re.I)

View File

@ -1,14 +1,19 @@
from __future__ import absolute_import
import logging
import os
import re
import shutil
import sys
import tempfile
import zipfile
from distutils.util import change_root
from distutils import sysconfig
from email.parser import FeedParser
import pip.wheel
from pip._vendor import pkg_resources, six
from pip._vendor.six.moves import configparser
from pip.compat import urllib, native_str, WINDOWS
@ -20,17 +25,20 @@ from pip.index import Link
from pip.locations import (
bin_py, running_under_virtualenv, PIP_DELETE_MARKER_FILENAME, bin_user,
)
from pip.log import logger
from pip.util import (
from pip.utils import (
display_path, rmtree, ask_path_exists, backup_dir, is_installable_dir,
dist_in_usersite, dist_in_site_packages, egg_link_path, make_path_relative,
call_subprocess, is_prerelease, read_text_file, FakeFile, _make_build_dir,
)
from pip.utils.logging import indent_log
from pip.req.req_uninstall import UninstallPathSet
from pip.vcs import vcs
from pip.wheel import move_wheel_files, Wheel, wheel_ext
logger = logging.getLogger(__name__)
class InstallRequirement(object):
def __init__(self, req, comes_from, source_dir=None, editable=False,
@ -122,7 +130,7 @@ class InstallRequirement(object):
link = Link(path_to_url(name))
elif is_archive_file(path):
if not os.path.isfile(path):
logger.warn(
logger.warning(
'Requirement %r looks like a filename, but the file does '
'not exist',
name
@ -221,15 +229,15 @@ class InstallRequirement(object):
name = self.name
new_location = os.path.join(new_build_dir, name)
if not os.path.exists(new_build_dir):
logger.debug('Creating directory %s' % new_build_dir)
logger.debug('Creating directory %s', new_build_dir)
_make_build_dir(new_build_dir)
if os.path.exists(new_location):
raise InstallationError(
'A package already exists in %s; please remove it to continue'
% display_path(new_location))
logger.debug(
'Moving package %s from %s to new location %s' %
(self, display_path(old_location), display_path(new_location))
'Moving package %s from %s to new location %s',
self, display_path(old_location), display_path(new_location),
)
shutil.move(old_location, new_location)
self._temp_build_dir = new_location
@ -278,18 +286,17 @@ class InstallRequirement(object):
def run_egg_info(self):
assert self.source_dir
if self.name:
logger.notify(
'Running setup.py (path:%s) egg_info for package %s' %
(self.setup_py, self.name)
logger.info(
'Running setup.py (path:%s) egg_info for package %s',
self.setup_py, self.name,
)
else:
logger.notify(
'Running setup.py (path:%s) egg_info for package from %s' %
(self.setup_py, self.url)
logger.info(
'Running setup.py (path:%s) egg_info for package from %s',
self.setup_py, self.url,
)
logger.indent += 2
try:
with indent_log():
# if it's distribute>=0.7, it won't contain an importable
# setuptools, and having an egg-info dir blocks the ability of
# setup.py to find setuptools plugins, so delete the egg-info dir
@ -325,10 +332,9 @@ class InstallRequirement(object):
cwd=cwd,
filter_stdout=self._filter_install,
show_stdout=False,
command_level=logger.VERBOSE_DEBUG,
command_level=logging.DEBUG,
command_desc='python setup.py egg_info')
finally:
logger.indent -= 2
if not self.req:
self.req = pkg_resources.Requirement.parse(
"%(Name)s==%(Version)s" % self.pkg_info())
@ -441,9 +447,9 @@ exec(compile(
p = FeedParser()
data = self.egg_info_data('PKG-INFO')
if not data:
logger.warn(
'No PKG-INFO file found in %s' %
display_path(self.egg_info_path('PKG-INFO'))
logger.warning(
'No PKG-INFO file found in %s',
display_path(self.egg_info_path('PKG-INFO')),
)
p.feed(data or '')
return p.close()
@ -466,7 +472,7 @@ exec(compile(
in_extra = match.group(1)
continue
if in_extra and in_extra not in extras:
logger.debug('skipping extra %s' % in_extra)
logger.debug('skipping extra %s', in_extra)
# Skip requirement for an extra we aren't requiring
continue
yield line
@ -485,21 +491,25 @@ exec(compile(
assert self.source_dir
version = self.installed_version
if version not in self.req:
logger.warn(
'Requested %s, but installing version %s' %
(self, self.installed_version)
logger.warning(
'Requested %s, but installing version %s',
self,
self.installed_version,
)
else:
logger.debug(
'Source in %s has version %s, which satisfies requirement %s' %
(display_path(self.source_dir), version, self)
'Source in %s has version %s, which satisfies requirement %s',
display_path(self.source_dir),
version,
self,
)
def update_editable(self, obtain=True):
if not self.url:
logger.info(
logger.debug(
"Cannot update repository at %s; repository location is "
"unknown" % self.source_dir
"unknown",
self.source_dir,
)
return
assert self.editable
@ -655,15 +665,17 @@ exec(compile(
if self.uninstalled:
self.uninstalled.rollback()
else:
logger.error("Can't rollback %s, nothing uninstalled."
% (self.project_name,))
logger.error(
"Can't rollback %s, nothing uninstalled.", self.project_name,
)
def commit_uninstall(self):
if self.uninstalled:
self.uninstalled.commit()
else:
logger.error("Can't commit %s, nothing uninstalled."
% (self.project_name,))
logger.error(
"Can't commit %s, nothing uninstalled.", self.project_name,
)
def archive(self, build_dir):
assert self.source_dir
@ -677,13 +689,14 @@ exec(compile(
if response == 'i':
create_archive = False
elif response == 'w':
logger.warn('Deleting %s' % display_path(archive_path))
logger.warning('Deleting %s', display_path(archive_path))
os.remove(archive_path)
elif response == 'b':
dest_file = backup_dir(archive_path)
logger.warn(
'Backing up %s to %s' %
(display_path(archive_path), display_path(dest_file))
logger.warning(
'Backing up %s to %s',
display_path(archive_path),
display_path(dest_file),
)
shutil.move(archive_path, dest_file)
if create_archive:
@ -708,8 +721,7 @@ exec(compile(
name = self._clean_zip_name(filename, dir)
zip.write(filename, self.name + '/' + name)
zip.close()
logger.indent -= 2
logger.notify('Saved %s' % display_path(archive_path))
logger.info('Saved %s', display_path(archive_path))
def _clean_zip_name(self, name, prefix):
assert name.startswith(prefix + os.path.sep), (
@ -763,19 +775,17 @@ exec(compile(
install_args += ['--install-headers',
os.path.join(sys.prefix, 'include', 'site',
py_ver_str)]
logger.notify('Running setup.py install for %s' % self.name)
logger.indent += 2
try:
logger.info('Running setup.py install for %s', self.name)
with indent_log():
call_subprocess(
install_args + install_options,
cwd=self.source_dir,
filter_stdout=self._filter_install,
show_stdout=False,
)
finally:
logger.indent -= 2
if not os.path.exists(record_filename):
logger.notify('Record file %s not found' % record_filename)
logger.debug('Record file %s not found', record_filename)
return
self.install_succeeded = True
if self.as_egg:
@ -796,9 +806,10 @@ exec(compile(
egg_info_dir = prepend_root(line)
break
else:
logger.warn(
logger.warning(
'Could not find .egg-info directory in install record for '
'%s' % self
'%s',
self,
)
f.close()
# FIXME: put the record somewhere
@ -827,7 +838,7 @@ exec(compile(
"""Remove the source files from this requirement, if they are marked
for deletion"""
if os.path.exists(self.delete_marker_filename):
logger.info('Removing source in %s' % self.source_dir)
logger.debug('Removing source in %s', self.source_dir)
if self.source_dir:
rmtree(self.source_dir)
self.source_dir = None
@ -836,9 +847,8 @@ exec(compile(
self._temp_build_dir = None
def install_editable(self, install_options, global_options=()):
logger.notify('Running setup.py develop for %s' % self.name)
logger.indent += 2
try:
logger.info('Running setup.py develop for %s', self.name)
with indent_log():
# FIXME: should we do --install-headers here too?
cwd = self.source_dir
if self.editable_options and \
@ -858,12 +868,11 @@ exec(compile(
cwd=cwd, filter_stdout=self._filter_install,
show_stdout=False)
finally:
logger.indent -= 2
self.install_succeeded = True
def _filter_install(self, line):
level = logger.NOTIFY
level = logging.INFO
for regex in [
r'^running .*',
r'^writing .*',
@ -877,7 +886,7 @@ exec(compile(
# Not sure what this warning is, but it seems harmless:
r"^warning: manifest_maker: standard file '-c' not found$"]:
if re.search(regex, line.strip()):
level = logger.INFO
level = logging.DEBUG
break
return (level, line)

View File

@ -1,3 +1,6 @@
from __future__ import absolute_import
import logging
import os
import shutil
@ -8,14 +11,17 @@ from pip.exceptions import (InstallationError, BestVersionAlreadyInstalled,
DistributionNotFound, PreviousBuildDirError)
from pip.index import Link
from pip.locations import (PIP_DELETE_MARKER_FILENAME, build_prefix)
from pip.log import logger
from pip.req.req_install import InstallRequirement
from pip.util import (display_path, rmtree, dist_in_usersite, call_subprocess,
_make_build_dir, normalize_path)
from pip.utils import (display_path, rmtree, dist_in_usersite, call_subprocess,
_make_build_dir, normalize_path)
from pip.utils.logging import indent_log
from pip.vcs import vcs
from pip.wheel import wheel_ext
logger = logging.getLogger(__name__)
class Requirements(object):
def __init__(self):
@ -130,7 +136,7 @@ class RequirementSet(object):
if os.path.exists(self.download_dir):
return True
else:
logger.fatal('Could not find download directory')
logger.critical('Could not find download directory')
raise InstallationError(
"Could not find or access download directory '%s'"
% display_path(self.download_dir))
@ -176,9 +182,11 @@ class RequirementSet(object):
else:
install_needed = False
if req_to_install.satisfied_by:
logger.notify('Requirement already satisfied '
'(use --upgrade to upgrade): %s'
% req_to_install)
logger.info(
'Requirement already satisfied (use --upgrade to '
'upgrade): %s',
req_to_install,
)
if req_to_install.editable:
if req_to_install.source_dir is None:
@ -250,30 +258,33 @@ class RequirementSet(object):
install = False
if req_to_install.satisfied_by:
if best_installed:
logger.notify('Requirement already up-to-date: %s'
% req_to_install)
logger.info(
'Requirement already up-to-date: %s',
req_to_install,
)
else:
logger.notify('Requirement already satisfied '
'(use --upgrade to upgrade): %s'
% req_to_install)
logger.info(
'Requirement already satisfied (use --upgrade to '
'upgrade): %s',
req_to_install,
)
if req_to_install.editable:
logger.notify('Obtaining %s' % req_to_install)
logger.info('Obtaining %s', req_to_install)
elif install:
if (req_to_install.url
and req_to_install.url.lower().startswith('file:')):
logger.notify(
'Unpacking %s' %
display_path(url_to_path(req_to_install.url))
logger.info(
'Unpacking %s',
display_path(url_to_path(req_to_install.url)),
)
else:
logger.notify('Downloading/unpacking %s' % req_to_install)
logger.indent += 2
logger.info('Downloading/unpacking %s', req_to_install)
# ################################ #
# # vcs update or unpack archive # #
# ################################ #
with indent_log():
# ################################ #
# # vcs update or unpack archive # #
# ################################ #
try:
is_wheel = False
if req_to_install.editable:
if req_to_install.source_dir is None:
@ -349,9 +360,11 @@ class RequirementSet(object):
do_download, session=self.session,
)
except HTTPError as exc:
logger.fatal(
logger.critical(
'Could not install requirement %s because '
'of error %s' % (req_to_install, exc)
'of error %s',
req_to_install,
exc,
)
raise InstallationError(
'Could not install requirement %s because '
@ -392,10 +405,10 @@ class RequirementSet(object):
req_to_install.satisfied_by
req_to_install.satisfied_by = None
else:
logger.notify(
logger.info(
'Requirement already satisfied (use '
'--upgrade to upgrade): %s' %
req_to_install
'--upgrade to upgrade): %s',
req_to_install,
)
install = False
@ -428,9 +441,9 @@ class RequirementSet(object):
req_to_install.dependency_links
)
if (req_to_install.extras):
logger.notify(
"Installing extra requirements: %r" %
','.join(req_to_install.extras)
logger.info(
"Installing extra requirements: %r",
','.join(req_to_install.extras),
)
if not self.ignore_dependencies:
for req in req_to_install.requirements(
@ -443,8 +456,8 @@ class RequirementSet(object):
# FIXME: proper warning
logger.error(
'Invalid requirement: %r (%s) in '
'requirement %s' %
(req, exc, req_to_install)
'requirement %s',
req, exc, req_to_install,
)
continue
if self.has_requirement(name):
@ -465,26 +478,21 @@ class RequirementSet(object):
if install:
self.successfully_downloaded.append(req_to_install)
finally:
logger.indent -= 2
def cleanup_files(self):
"""Clean up files, remove builds."""
logger.notify('Cleaning up...')
logger.indent += 2
for req in self.reqs_to_cleanup:
req.remove_temporary_source()
logger.info('Cleaning up...')
with indent_log():
for req in self.reqs_to_cleanup:
req.remove_temporary_source()
remove_dir = []
if self._pip_has_created_build_dir():
remove_dir.append(self.build_dir)
remove_dir = []
if self._pip_has_created_build_dir():
remove_dir.append(self.build_dir)
for dir in remove_dir:
if os.path.exists(dir):
logger.info('Removing temporary dir %s...' % dir)
rmtree(dir)
logger.indent -= 2
for dir in remove_dir:
if os.path.exists(dir):
logger.debug('Removing temporary dir %s...', dir)
rmtree(dir)
def _pip_has_created_build_dir(self):
return (
@ -496,7 +504,7 @@ class RequirementSet(object):
def copy_to_build_dir(self, req_to_install):
target_dir = req_to_install.editable and self.src_dir or self.build_dir
logger.info("Copying %s to %s" % (req_to_install.name, target_dir))
logger.debug("Copying %s to %s", req_to_install.name, target_dir)
dest = os.path.join(target_dir, req_to_install.name)
shutil.copytree(req_to_install.source_dir, dest)
call_subprocess(["python", "%s/setup.py" % dest, "clean"], cwd=dest,
@ -524,12 +532,12 @@ class RequirementSet(object):
to_install.append(req)
if to_install:
logger.notify(
'Installing collected packages: %s' %
', '.join([req.name for req in to_install])
logger.info(
'Installing collected packages: %s',
', '.join([req.name for req in to_install]),
)
logger.indent += 2
try:
with indent_log():
for requirement in to_install:
# DISTRIBUTE TO SETUPTOOLS UPGRADE HACK (1 of 3 parts)
@ -559,13 +567,12 @@ class RequirementSet(object):
pass
if requirement.conflicts_with:
logger.notify('Found existing installation: %s'
% requirement.conflicts_with)
logger.indent += 2
try:
logger.info(
'Found existing installation: %s',
requirement.conflicts_with,
)
with indent_log():
requirement.uninstall(auto_confirm=True)
finally:
logger.indent -= 2
try:
requirement.install(
install_options,
@ -584,6 +591,5 @@ class RequirementSet(object):
and requirement.install_succeeded):
requirement.commit_uninstall()
requirement.remove_temporary_source()
finally:
logger.indent -= 2
self.successfully_installed = to_install

View File

@ -1,13 +1,19 @@
from __future__ import absolute_import
import imp
import logging
import os
import sys
import tempfile
from pip.compat import uses_pycache, WINDOWS
from pip.exceptions import UninstallationError
from pip.log import logger
from pip.util import (rmtree, ask, is_local, dist_is_local, renames,
normalize_path)
from pip.utils import (rmtree, ask, is_local, dist_is_local, renames,
normalize_path)
from pip.utils.logging import indent_log
logger = logging.getLogger(__name__)
class UninstallPathSet(object):
@ -31,13 +37,11 @@ class UninstallPathSet(object):
def _can_uninstall(self):
if not dist_is_local(self.dist):
logger.notify(
"Not uninstalling %s at %s, outside environment %s" %
(
self.dist.project_name,
normalize_path(self.dist.location),
sys.prefix
),
logger.info(
"Not uninstalling %s at %s, outside environment %s",
self.dist.project_name,
normalize_path(self.dist.location),
sys.prefix,
)
return False
return True
@ -89,54 +93,52 @@ class UninstallPathSet(object):
if not self._can_uninstall():
return
if not self.paths:
logger.notify(
"Can't uninstall '%s'. No files were found to uninstall." %
self.dist.project_name
logger.info(
"Can't uninstall '%s'. No files were found to uninstall.",
self.dist.project_name,
)
return
logger.notify('Uninstalling %s:' % self.dist.project_name)
logger.indent += 2
paths = sorted(self.compact(self.paths))
try:
logger.info('Uninstalling %s:', self.dist.project_name)
with indent_log():
paths = sorted(self.compact(self.paths))
if auto_confirm:
response = 'y'
else:
for path in paths:
logger.notify(path)
logger.info(path)
response = ask('Proceed (y/n)? ', ('y', 'n'))
if self._refuse:
logger.notify('Not removing or modifying (outside of prefix):')
logger.info('Not removing or modifying (outside of prefix):')
for path in self.compact(self._refuse):
logger.notify(path)
logger.info(path)
if response == 'y':
self.save_dir = tempfile.mkdtemp(suffix='-uninstall',
prefix='pip-')
for path in paths:
new_path = self._stash(path)
logger.info('Removing file or directory %s' % path)
logger.debug('Removing file or directory %s', path)
self._moved_paths.append(path)
renames(path, new_path)
for pth in self.pth.values():
pth.remove()
logger.notify(
'Successfully uninstalled %s' % self.dist.project_name
logger.info(
'Successfully uninstalled %s', self.dist.project_name
)
finally:
logger.indent -= 2
def rollback(self):
"""Rollback the changes previously made by remove()."""
if self.save_dir is None:
logger.error(
"Can't roll back %s; was not uninstalled" %
self.dist.project_name
"Can't roll back %s; was not uninstalled",
self.dist.project_name,
)
return False
logger.notify('Rolling back uninstall of %s' % self.dist.project_name)
logger.info('Rolling back uninstall of %s', self.dist.project_name)
for path in self._moved_paths:
tmp_path = self._stash(path)
logger.info('Replacing %s' % path)
logger.debug('Replacing %s', path)
renames(tmp_path, path)
for pth in self.pth.values():
pth.rollback()
@ -170,7 +172,7 @@ class UninstallPthEntries(object):
self.entries.add(entry)
def remove(self):
logger.info('Removing pth entries from %s:' % self.file)
logger.debug('Removing pth entries from %s:', self.file)
fh = open(self.file, 'rb')
# windows uses '\r\n' with py3k, but uses '\n' with py2.x
lines = fh.readlines()
@ -182,7 +184,7 @@ class UninstallPthEntries(object):
endline = '\n'
for entry in self.entries:
try:
logger.info('Removing entry: %s' % entry)
logger.debug('Removing entry: %s', entry)
lines.remove((entry + endline).encode("utf-8"))
except ValueError:
pass
@ -193,10 +195,10 @@ class UninstallPthEntries(object):
def rollback(self):
if self._saved_lines is None:
logger.error(
'Cannot roll back changes to %s, none were made' % self.file
'Cannot roll back changes to %s, none were made', self.file
)
return False
logger.info('Rolling %s back to previous state' % self.file)
logger.debug('Rolling %s back to previous state', self.file)
fh = open(self.file, 'wb')
fh.writelines(self._saved_lines)
fh.close()

View File

@ -1,3 +1,5 @@
from __future__ import absolute_import
SUCCESS = 0
ERROR = 1
UNKNOWN_ERROR = 2

View File

@ -1,5 +1,8 @@
from __future__ import absolute_import
import contextlib
import locale
import logging
import re
import os
import posixpath
@ -16,7 +19,6 @@ from pip.locations import (
site_packages, user_site, running_under_virtualenv, virtualenv_no_global,
write_delete_marker_file
)
from pip.log import logger
from pip._vendor import pkg_resources, six
from pip._vendor.distlib import version
from pip._vendor.six.moves import input
@ -40,6 +42,9 @@ __all__ = ['rmtree', 'display_path', 'backup_dir',
'captured_stdout', 'remove_tracebacks']
logger = logging.getLogger(__name__)
def get_prog():
try:
if os.path.basename(sys.argv[0]) in ('__main__.py', '-c'):
@ -562,7 +567,9 @@ def untar_file(filename, location):
elif filename.lower().endswith('.tar'):
mode = 'r'
else:
logger.warn('Cannot determine compression type for file %s' % filename)
logger.warning(
'Cannot determine compression type for file %s', filename,
)
mode = 'r:*'
tar = tarfile.open(filename, mode)
try:
@ -587,9 +594,10 @@ def untar_file(filename, location):
except Exception as exc:
# Some corrupt tar files seem to produce this
# (specifically bad symlinks)
logger.warn(
'In the tar file %s the member %s is invalid: %s'
% (filename, member.name, exc))
logger.warning(
'In the tar file %s the member %s is invalid: %s',
filename, member.name, exc,
)
continue
else:
try:
@ -597,9 +605,10 @@ def untar_file(filename, location):
except (KeyError, AttributeError) as exc:
# Some corrupt tar files seem to produce this
# (specifically bad symlinks)
logger.warn(
'In the tar file %s the member %s is invalid: %s'
% (filename, member.name, exc))
logger.warning(
'In the tar file %s the member %s is invalid: %s',
filename, member.name, exc,
)
continue
if not os.path.exists(os.path.dirname(path)):
os.makedirs(os.path.dirname(path))
@ -642,10 +651,10 @@ def unpack_file(filename, location, content_type, link):
else:
# FIXME: handle?
# FIXME: magic signatures?
logger.fatal(
logger.critical(
'Cannot unpack file %s (downloaded from %s, content-type: %s); '
'cannot detect archive format' %
(filename, location, content_type)
'cannot detect archive format',
filename, location, content_type,
)
raise InstallationError(
'Cannot determine archive format of %s' % location
@ -666,7 +675,7 @@ def remove_tracebacks(output):
def call_subprocess(cmd, show_stdout=True,
filter_stdout=None, cwd=None,
raise_on_returncode=True,
command_level=logger.DEBUG, command_desc=None,
command_level=logging.DEBUG, command_desc=None,
extra_environ=None):
if command_desc is None:
cmd_parts = []
@ -679,7 +688,7 @@ def call_subprocess(cmd, show_stdout=True,
stdout = None
else:
stdout = subprocess.PIPE
logger.log(command_level, "Running command %s" % command_desc)
logger.log(command_level, "Running command %s", command_desc)
env = os.environ.copy()
if extra_environ:
env.update(extra_environ)
@ -688,8 +697,9 @@ def call_subprocess(cmd, show_stdout=True,
cmd, stderr=subprocess.STDOUT, stdin=None, stdout=stdout,
cwd=cwd, env=env)
except Exception as exc:
logger.fatal(
"Error %s while executing command %s" % (exc, command_desc))
logger.critical(
"Error %s while executing command %s", exc, command_desc,
)
raise
all_output = []
if stdout is not None:
@ -706,10 +716,11 @@ def call_subprocess(cmd, show_stdout=True,
if isinstance(level, tuple):
level, line = level
logger.log(level, line)
if not logger.stdout_level_matches(level):
logger.show_progress()
# if not logger.stdout_level_matches(level) and False:
# # TODO(dstufft): Handle progress bar.
# logger.show_progress()
else:
logger.info(line)
logger.debug(line)
else:
returned_stdout, returned_stderr = proc.communicate()
all_output = [returned_stdout or '']
@ -717,10 +728,10 @@ def call_subprocess(cmd, show_stdout=True,
if proc.returncode:
if raise_on_returncode:
if all_output:
logger.notify(
'Complete output from command %s:' % command_desc
logger.info(
'Complete output from command %s:', command_desc,
)
logger.notify(
logger.info(
'\n'.join(all_output) +
'\n----------------------------------------'
)
@ -728,9 +739,10 @@ def call_subprocess(cmd, show_stdout=True,
'Command "%s" failed with error code %s in %s'
% (command_desc, proc.returncode, cwd))
else:
logger.warn(
'Command "%s" had error code %s in %s'
% (command_desc, proc.returncode, cwd))
logger.warning(
'Command "%s" had error code %s in %s',
command_desc, proc.returncode, cwd,
)
if stdout is not None:
return remove_tracebacks(''.join(all_output))

68
pip/utils/deprecation.py Normal file
View File

@ -0,0 +1,68 @@
"""
A module that implements tooling to enable easy warnings about deprecations.
"""
from __future__ import absolute_import
import logging
import warnings
class PipDeprecationWarning(Warning):
pass
class RemovedInPip17Warning(PipDeprecationWarning, DeprecationWarning):
pass
class RemovedInPip18Warning(PipDeprecationWarning, PendingDeprecationWarning):
pass
DEPRECATIONS = [RemovedInPip17Warning, RemovedInPip18Warning]
# Warnings <-> Logging Integration
_warnings_showwarning = None
def _showwarning(message, category, filename, lineno, file=None, line=None):
if file is not None:
if _warnings_showwarning is not None:
_warnings_showwarning(
message, category, filename, lineno, file, line,
)
else:
if issubclass(category, PipDeprecationWarning):
# We use a specially named logger which will handle all of the
# deprecation messages for pip.
logger = logging.getLogger("pip.deprecations")
# This is purposely using the % formatter here instead of letting
# the logging module handle the interpolation. This is because we
# want it to appear as if someone typed this entire message out.
log_message = "DEPRECATION: %s" % message
# Things that are DeprecationWarnings will be removed in the very
# next version of pip. We want these to be more obvious so we
# use the ERROR logging level while the PendingDeprecationWarnings
# are still have at least 2 versions to go until they are removed
# so they can just be warnings.
if issubclass(category, DeprecationWarning):
logger.error(log_message)
else:
logger.warning(log_message)
else:
_warnings_showwarning(
message, category, filename, lineno, file, line,
)
def install_warning_logger():
    """Install ``_showwarning`` as the global warnings display hook.

    Idempotent: the original ``warnings.showwarning`` is saved only once,
    so repeated calls are harmless.
    """
    global _warnings_showwarning

    if _warnings_showwarning is not None:
        return

    _warnings_showwarning = warnings.showwarning
    warnings.showwarning = _showwarning

121
pip/utils/logging.py Normal file
View File

@ -0,0 +1,121 @@
from __future__ import absolute_import
import contextlib
import logging
import logging.handlers
import os
try:
import threading
except ImportError:
import dummy_threading as threading
from pip.compat import WINDOWS
try:
from pip._vendor import colorama
# Lots of different errors can come from this, including SystemError and
# ImportError.
except Exception:
colorama = None
# Per-thread indentation level for log output.  Note that assigning
# ``indentation`` here only initialises it on the importing thread;
# other threads fall back to 0 via ``get_indentation``.
_log_state = threading.local()
_log_state.indentation = 0


@contextlib.contextmanager
def indent_log(num=2):
    """
    A context manager which will cause the log output to be indented for any
    log messages emitted inside it.
    """
    # Use get_indentation() rather than touching the attribute directly so
    # threads that never entered this context manager start from 0 instead
    # of raising AttributeError.
    _log_state.indentation = get_indentation() + num
    try:
        yield
    finally:
        # Restore the previous level even when the body raises, so an
        # exception inside the block cannot permanently skew later output.
        _log_state.indentation -= num


def get_indentation():
    # ``indentation`` is only pre-set on the thread that imported this
    # module; default to 0 everywhere else.
    return getattr(_log_state, 'indentation', 0)
class IndentingFormatter(logging.Formatter):

    def format(self, record):
        """
        Format the record with the standard formatter, then prefix every
        line of the result with the current indentation level.
        """
        prefix = " " * get_indentation()
        rendered = logging.Formatter.format(self, record)
        return "".join(
            prefix + line for line in rendered.splitlines(True)
        )
def _color_wrap(*colors):
    """Return a function that wraps its argument in the given ANSI color
    codes, followed by a style reset."""
    def wrapped(inp):
        return "%s%s%s" % ("".join(colors), inp, colorama.Style.RESET_ALL)
    return wrapped
class ColorizedStreamHandler(logging.StreamHandler):

    # Level -> colorizer table, ordered from highest logging level to
    # lowest so the first match wins.  Left empty when colorama is
    # unavailable, which disables coloring entirely.
    if colorama:
        COLORS = [
            (logging.ERROR, _color_wrap(colorama.Fore.RED)),
            (logging.WARNING, _color_wrap(colorama.Fore.YELLOW)),
        ]
    else:
        COLORS = []

    def __init__(self, stream=None):
        logging.StreamHandler.__init__(self, stream)

        # On Windows, wrap the stream so ANSI escape sequences are
        # translated by colorama.
        if WINDOWS and colorama:
            self.stream = colorama.AnsiToWin32(self.stream)

    def should_color(self):
        """Return True when it is appropriate to colorize the output."""
        # Without colorama we never color.
        if not colorama:
            return False

        # Unwrap colorama's proxy to inspect the real underlying stream.
        if isinstance(self.stream, colorama.AnsiToWin32):
            real_stream = self.stream.wrapped
        else:
            real_stream = self.stream

        # Color when writing to a tty...
        if hasattr(real_stream, "isatty") and real_stream.isatty():
            return True

        # ...or when the terminal explicitly advertises ANSI support.
        if os.environ.get("TERM") == "ANSI":
            return True

        # Otherwise do not color.
        return False

    def format(self, record):
        msg = logging.StreamHandler.format(self, record)

        if not self.should_color():
            return msg

        for level, colorizer in self.COLORS:
            if record.levelno >= level:
                return colorizer(msg)

        return msg
class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler):

    def _open(self):
        # Create the log file's directory on demand before the base class
        # opens the file, since RotatingFileHandler assumes it exists.
        log_directory = os.path.dirname(self.baseFilename)
        if not os.path.exists(log_directory):
            os.makedirs(log_directory)
        return logging.handlers.RotatingFileHandler._open(self)

70
pip/utils/ui.py Normal file
View File

@ -0,0 +1,70 @@
from __future__ import absolute_import
from __future__ import division
import itertools
import sys
from pip.utils import format_size
from pip.utils.logging import get_indentation
from pip._vendor.progress.bar import Bar
from pip._vendor.progress.helpers import WritelnMixin
from pip._vendor.progress.spinner import Spinner
class DownloadProgressMixin(object):
    """Mixin adding download-oriented display fields to a progress bar or
    spinner from the vendored ``progress`` package."""

    def __init__(self, *args, **kwargs):
        super(DownloadProgressMixin, self).__init__(*args, **kwargs)
        # Line the progress message up with the current log indentation.
        self.message = "%s%s" % (" " * (get_indentation() + 2), self.message)

    @property
    def downloaded(self):
        # Human-readable count of bytes downloaded so far.
        return format_size(self.index)

    @property
    def download_speed(self):
        # Human-readable average download speed.
        return "%s/s" % format_size(1 / self.avg)

    @property
    def pretty_eta(self):
        # Human-readable ETA, or the empty string when unknown.
        if not self.eta:
            return ""
        return "eta %s" % self.eta_td

    def iter(self, it, n=1):
        """Yield each item from *it*, advancing the display by *n* per
        item, and finish the display when exhausted."""
        for item in it:
            yield item
            self.next(n)
        self.finish()
class DownloadProgressBar(DownloadProgressMixin, Bar):

    # Emit progress on stdout.
    file = sys.stdout
    # %-style templates; ``downloaded``, ``download_speed`` and
    # ``pretty_eta`` are properties supplied by DownloadProgressMixin.
    message = "%(percent)d%%"
    suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s"
class DownloadProgressSpinner(DownloadProgressMixin, WritelnMixin, Spinner):
    """Spinner shown while downloading when the total size is unknown."""

    # Emit progress on stdout.
    file = sys.stdout
    suffix = "%(downloaded)s %(download_speed)s"

    def next_phase(self):
        # Cycle through the spinner glyphs forever; the cycle iterator is
        # created lazily on first use.
        if not hasattr(self, "_phaser"):
            self._phaser = itertools.cycle(self.phases)
        return next(self._phaser)

    def update(self):
        """Render one frame: message, spinner glyph, then suffix, with a
        single space between non-empty parts."""
        message = self.message % self
        phase = self.next_phase()
        suffix = self.suffix % self

        line = message
        if message:
            line += " "
        line += phase
        if suffix:
            line += " "
        line += suffix

        self.writeln(line)

View File

@ -1,18 +1,22 @@
"""Handles all VCS (version control) support"""
from __future__ import absolute_import
import logging
import os
import shutil
from pip.compat import urlparse, urllib
from pip.exceptions import BadCommand
from pip.log import logger
from pip.util import (display_path, backup_dir, find_command,
rmtree, ask_path_exists)
from pip.utils import (display_path, backup_dir, find_command,
rmtree, ask_path_exists)
__all__ = ['vcs', 'get_src_requirement']
logger = logging.getLogger(__name__)
class VcsSupport(object):
_registry = {}
schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn']
@ -46,7 +50,7 @@ class VcsSupport(object):
def register(self, cls):
if not hasattr(cls, 'name'):
logger.warn('Cannot register VCS %s' % cls.__name__)
logger.warning('Cannot register VCS %s', cls.__name__)
return
if cls.name not in self._registry:
self._registry[cls.name] = cls
@ -57,7 +61,7 @@ class VcsSupport(object):
elif cls in self._registry.values():
del self._registry[cls.name]
else:
logger.warn('Cannot unregister because no class or name given')
logger.warning('Cannot unregister because no class or name given')
def get_backend_name(self, location):
"""
@ -95,7 +99,7 @@ class VersionControl(object):
super(VersionControl, self).__init__(*args, **kwargs)
def _filter(self, line):
return (logger.INFO, line)
return (logging.DEBUG, line)
def _is_local_repository(self, repo):
"""
@ -110,7 +114,7 @@ class VersionControl(object):
if self._cmd is not None:
return self._cmd
command = find_command(self.name)
logger.info('Found command %r at %r' % (self.name, command))
logger.debug('Found command %r at %r', self.name, command)
self._cmd = command
return command
@ -187,46 +191,67 @@ class VersionControl(object):
if os.path.exists(os.path.join(dest, self.dirname)):
existing_url = self.get_url(dest)
if self.compare_urls(existing_url, url):
logger.info('%s in %s exists, and has correct URL (%s)' %
(self.repo_name.title(), display_path(dest),
url))
logger.notify('Updating %s %s%s' %
(display_path(dest), self.repo_name,
rev_display))
logger.debug(
'%s in %s exists, and has correct URL (%s)',
self.repo_name.title(),
display_path(dest),
url,
)
logger.info(
'Updating %s %s%s',
display_path(dest),
self.repo_name,
rev_display,
)
self.update(dest, rev_options)
else:
logger.warn('%s %s in %s exists with URL %s' %
(self.name, self.repo_name,
display_path(dest), existing_url))
logger.warning(
'%s %s in %s exists with URL %s',
self.name,
self.repo_name,
display_path(dest),
existing_url,
)
prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ',
('s', 'i', 'w', 'b'))
else:
logger.warn('Directory %s already exists, '
'and is not a %s %s.' %
(dest, self.name, self.repo_name))
logger.warning(
'Directory %s already exists, and is not a %s %s.',
dest,
self.name,
self.repo_name,
)
prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b'))
if prompt:
logger.warn('The plan is to install the %s repository %s' %
(self.name, url))
logger.warning(
'The plan is to install the %s repository %s',
self.name,
url,
)
response = ask_path_exists('What to do? %s' % prompt[0],
prompt[1])
if response == 's':
logger.notify('Switching %s %s to %s%s' %
(self.repo_name, display_path(dest), url,
rev_display))
logger.info(
'Switching %s %s to %s%s',
self.repo_name,
display_path(dest),
url,
rev_display,
)
self.switch(dest, url, rev_options)
elif response == 'i':
# do nothing
pass
elif response == 'w':
logger.warn('Deleting %s' % display_path(dest))
logger.warning('Deleting %s', display_path(dest))
rmtree(dest)
checkout = True
elif response == 'b':
dest_dir = backup_dir(dest)
logger.warn('Backing up %s to %s'
% (display_path(dest), dest_dir))
logger.warning(
'Backing up %s to %s', display_path(dest), dest_dir,
)
shutil.move(dest, dest_dir)
checkout = True
return checkout
@ -248,12 +273,16 @@ def get_src_requirement(dist, location, find_tags):
location,
find_tags)
except BadCommand:
logger.warn('cannot determine version of editable source in %s '
'(%s command not found in path)' % (
location, version_control.name))
logger.warning(
'cannot determine version of editable source in %s '
'(%s command not found in path)',
location,
version_control.name,
)
return dist.as_requirement()
logger.warn(
logger.warning(
'cannot determine version of editable source in %s (is not SVN '
'checkout, Git clone, Mercurial clone or Bazaar branch)' % location
'checkout, Git clone, Mercurial clone or Bazaar branch)',
location,
)
return dist.as_requirement()

View File

@ -1,13 +1,19 @@
from __future__ import absolute_import
import logging
import os
import tempfile
import re
from pip.compat import urlparse
from pip.log import logger
from pip.util import rmtree, display_path, call_subprocess
from pip.utils import rmtree, display_path, call_subprocess
from pip.vcs import vcs, VersionControl
from pip.download import path_to_url
logger = logging.getLogger(__name__)
class Bazaar(VersionControl):
name = 'bzr'
dirname = '.bzr'
@ -56,8 +62,12 @@ class Bazaar(VersionControl):
rev_options = []
rev_display = ''
if self.check_destination(dest, url, rev_options, rev_display):
logger.notify('Checking out %s%s to %s'
% (url, rev_display, display_path(dest)))
logger.info(
'Checking out %s%s to %s',
url,
rev_display,
display_path(dest),
)
call_subprocess(
[self.cmd, 'branch', '-q'] + rev_options + [url, dest])

View File

@ -1,16 +1,21 @@
from __future__ import absolute_import
import logging
import tempfile
import os.path
from pip.util import call_subprocess
from pip.util import display_path, rmtree
from pip.utils import call_subprocess
from pip.utils import display_path, rmtree
from pip.vcs import vcs, VersionControl
from pip.log import logger
from pip.compat import url2pathname, urlparse
urlsplit = urlparse.urlsplit
urlunsplit = urlparse.urlunsplit
logger = logging.getLogger(__name__)
class Git(VersionControl):
name = 'git'
dirname = '.git'
@ -67,8 +72,8 @@ class Git(VersionControl):
# a local tag or branch name
return [revisions[rev]]
else:
logger.warn(
"Could not find a tag or branch '%s', assuming commit." % rev,
logger.warning(
"Could not find a tag or branch '%s', assuming commit.", rev,
)
return rev_options
@ -104,8 +109,8 @@ class Git(VersionControl):
rev_options = ['origin/master']
rev_display = ''
if self.check_destination(dest, url, rev_options, rev_display):
logger.notify(
'Cloning %s%s to %s' % (url, rev_display, display_path(dest)),
logger.info(
'Cloning %s%s to %s', url, rev_display, display_path(dest),
)
call_subprocess([self.cmd, 'clone', '-q', url, dest])
#: repo may contain submodules

View File

@ -1,15 +1,20 @@
from __future__ import absolute_import
import logging
import os
import tempfile
import re
from pip.util import call_subprocess
from pip.util import display_path, rmtree
from pip.log import logger
from pip.utils import call_subprocess
from pip.utils import display_path, rmtree
from pip.vcs import vcs, VersionControl
from pip.download import path_to_url
from pip._vendor.six.moves import configparser
logger = logging.getLogger(__name__)
class Mercurial(VersionControl):
name = 'hg'
dirname = '.hg'
@ -37,9 +42,9 @@ class Mercurial(VersionControl):
config.write(config_file)
config_file.close()
except (OSError, configparser.NoSectionError) as exc:
logger.warn(
'Could not switch Mercurial repository to %s: %s'
% (url, exc))
logger.warning(
'Could not switch Mercurial repository to %s: %s', url, exc,
)
else:
call_subprocess([self.cmd, 'update', '-q'] + rev_options, cwd=dest)
@ -57,8 +62,12 @@ class Mercurial(VersionControl):
rev_options = []
rev_display = ''
if self.check_destination(dest, url, rev_options, rev_display):
logger.notify('Cloning hg %s%s to %s'
% (url, rev_display, display_path(dest)))
logger.info(
'Cloning hg %s%s to %s',
url,
rev_display,
display_path(dest),
)
call_subprocess([self.cmd, 'clone', '--noupdate', '-q', url, dest])
call_subprocess([self.cmd, 'update', '-q'] + rev_options, cwd=dest)

View File

@ -1,10 +1,13 @@
from __future__ import absolute_import
import logging
import os
import re
from pip.compat import urlparse
from pip.index import Link
from pip.util import rmtree, display_path, call_subprocess
from pip.log import logger
from pip.utils import rmtree, display_path, call_subprocess
from pip.utils.logging import indent_log
from pip.vcs import vcs, VersionControl
_svn_xml_url_re = re.compile('url="([^"]+)"')
@ -15,6 +18,9 @@ _svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"')
_svn_info_xml_url_re = re.compile(r'<url>(.*)</url>')
logger = logging.getLogger(__name__)
class Subversion(VersionControl):
name = 'svn'
dirname = '.svn'
@ -32,20 +38,20 @@ class Subversion(VersionControl):
)
match = _svn_url_re.search(output)
if not match:
logger.warn(
'Cannot determine URL of svn checkout %s' %
display_path(location)
logger.warning(
'Cannot determine URL of svn checkout %s',
display_path(location),
)
logger.info('Output that cannot be parsed: \n%s' % output)
logger.debug('Output that cannot be parsed: \n%s', output)
return None, None
url = match.group(1).strip()
match = _svn_revision_re.search(output)
if not match:
logger.warn(
'Cannot determine revision of svn checkout %s' %
display_path(location)
logger.warning(
'Cannot determine revision of svn checkout %s',
display_path(location),
)
logger.info('Output that cannot be parsed: \n%s' % output)
logger.debug('Output that cannot be parsed: \n%s', output)
return url, None
return url, match.group(1)
@ -53,9 +59,8 @@ class Subversion(VersionControl):
"""Export the svn repository at the url to the destination location"""
url, rev = self.get_url_rev()
rev_options = get_rev_options(url, rev)
logger.notify('Exporting svn repository %s to %s' % (url, location))
logger.indent += 2
try:
logger.info('Exporting svn repository %s to %s', url, location)
with indent_log():
if os.path.exists(location):
# Subversion doesn't like to check out over an existing
# directory --force fixes this, but was only added in svn 1.5
@ -63,8 +68,6 @@ class Subversion(VersionControl):
call_subprocess(
[self.cmd, 'export'] + rev_options + [url, location],
filter_stdout=self._filter, show_stdout=False)
finally:
logger.indent -= 2
def switch(self, dest, url, rev_options):
call_subprocess(
@ -82,8 +85,12 @@ class Subversion(VersionControl):
else:
rev_display = ''
if self.check_destination(dest, url, rev_options, rev_display):
logger.notify('Checking out %s%s to %s'
% (url, rev_display, display_path(dest)))
logger.info(
'Checking out %s%s to %s',
url,
rev_display,
display_path(dest),
)
call_subprocess(
[self.cmd, 'checkout', '-q'] + rev_options + [url, dest])
@ -146,10 +153,10 @@ class Subversion(VersionControl):
if location == last_location:
# We've traversed up to the root of the filesystem without
# finding setup.py
logger.warn(
logger.warning(
"Could not find setup.py for directory %s (tried all "
"parent directories)" %
orig_location
"parent directories)",
orig_location,
)
return None
@ -240,17 +247,18 @@ class Subversion(VersionControl):
tag_revs = self.get_tag_revs(tag_url)
match = self.find_tag_match(rev, tag_revs)
if match:
logger.notify(
'trunk checkout %s seems to be equivalent to tag %s' %
match
logger.info(
'trunk checkout %s seems to be equivalent to tag %s',
match,
)
repo = '%s/%s' % (tag_url, match)
full_egg_name = '%s-%s' % (egg_project_name, match)
else:
# Don't know what it is
logger.warn(
logger.warning(
'svn URL does not fit normal structure (tags/branches/trunk): '
'%s' % repo
'%s',
repo,
)
full_egg_name = '%s-dev_r%s' % (egg_project_name, rev)
return 'svn+%s@%s#egg=%s' % (repo, rev, full_egg_name)

View File

@ -1,12 +1,13 @@
"""
Support for installing and building the "wheel" binary package format.
"""
from __future__ import with_statement
from __future__ import absolute_import
import compileall
import csv
import functools
import hashlib
import logging
import os
import re
import shutil
@ -18,10 +19,10 @@ from email.parser import Parser
from pip.compat import StringIO
from pip.exceptions import InvalidWheelFilename, UnsupportedWheel
from pip.locations import distutils_scheme
from pip.log import logger
from pip import pep425tags
from pip.util import (call_subprocess, normalize_path, make_path_relative,
captured_stdout, remove_tracebacks)
from pip.utils import (call_subprocess, normalize_path, make_path_relative,
captured_stdout, remove_tracebacks)
from pip.utils.logging import indent_log
from pip._vendor.distlib.scripts import ScriptMaker
from pip._vendor import pkg_resources
from pip._vendor.six.moves import configparser
@ -33,6 +34,9 @@ wheel_ext = '.whl'
VERSION_COMPATIBLE = (1, 0)
logger = logging.getLogger(__name__)
def rehash(path, algo='sha256', blocksize=1 << 20):
"""Return (hash, length) for path using hashlib.new(algo)"""
h = hashlib.new(algo)
@ -411,7 +415,7 @@ def uninstallation_paths(dist):
UninstallPathSet.add() takes care of the __pycache__ .pyc.
"""
from pip.util import FakeFile # circular import
from pip.utils import FakeFile # circular import
r = csv.reader(FakeFile(dist.get_metadata_lines('RECORD')))
for row in r:
path = os.path.join(dist.location, row[0])
@ -465,8 +469,10 @@ def check_compatibility(version, name):
"of pip" % (name, '.'.join(map(str, version)))
)
elif version > VERSION_COMPATIBLE:
logger.warn('Installing from a newer Wheel-Version (%s)'
% '.'.join(map(str, version)))
logger.warning(
'Installing from a newer Wheel-Version (%s)',
'.'.join(map(str, version)),
)
class Wheel(object):
@ -545,15 +551,15 @@ class WheelBuilder(object):
"__file__, 'exec'))" % req.setup_py
] + list(self.global_options)
logger.notify('Running setup.py bdist_wheel for %s' % req.name)
logger.notify('Destination directory: %s' % self.wheel_dir)
logger.info('Running setup.py bdist_wheel for %s', req.name)
logger.info('Destination directory: %s', self.wheel_dir)
wheel_args = base_args + ['bdist_wheel', '-d', self.wheel_dir] \
+ self.build_options
try:
call_subprocess(wheel_args, cwd=req.source_dir, show_stdout=False)
return True
except:
logger.error('Failed building wheel for %s' % req.name)
logger.error('Failed building wheel for %s', req.name)
return False
def build(self):
@ -567,11 +573,13 @@ class WheelBuilder(object):
buildset = []
for req in reqset:
if req.is_wheel:
logger.notify(
'Skipping %s, due to already being wheel.' % req.name)
logger.info(
'Skipping %s, due to already being wheel.', req.name,
)
elif req.editable:
logger.notify(
'Skipping %s, due to being editable' % req.name)
logger.info(
'Skipping %s, due to being editable', req.name,
)
else:
buildset.append(req)
@ -579,29 +587,28 @@ class WheelBuilder(object):
return True
# Build the wheels.
logger.notify(
'Building wheels for collected packages: %s' %
', '.join([req.name for req in buildset])
logger.info(
'Building wheels for collected packages: %s',
', '.join([req.name for req in buildset]),
)
logger.indent += 2
build_success, build_failure = [], []
for req in buildset:
if self._build_one(req):
build_success.append(req)
else:
build_failure.append(req)
logger.indent -= 2
with indent_log():
build_success, build_failure = [], []
for req in buildset:
if self._build_one(req):
build_success.append(req)
else:
build_failure.append(req)
# notify success/failure
if build_success:
logger.notify(
'Successfully built %s' %
' '.join([req.name for req in build_success])
logger.info(
'Successfully built %s',
' '.join([req.name for req in build_success]),
)
if build_failure:
logger.notify(
'Failed to build %s' %
' '.join([req.name for req in build_failure])
logger.info(
'Failed to build %s',
' '.join([req.name for req in build_failure]),
)
# Return True if all builds were successful
return len(build_failure) == 0

View File

@ -6,7 +6,7 @@ from os.path import join, curdir, pardir
import pytest
from pip.util import rmtree
from pip.utils import rmtree
from tests.lib import pyversion
from tests.lib.local_repos import local_checkout
from tests.lib.path import Path
@ -62,8 +62,8 @@ def test_editable_install(script):
"INITools==0.2 should either be a path to a local project or a VCS url"
in result.stdout
)
assert len(result.files_created) == 1, result.files_created
assert not result.files_updated, result.files_updated
assert not result.files_created
assert not result.files_updated
def test_install_editable_from_svn(script, tmpdir):
@ -248,17 +248,13 @@ def test_vcs_url_final_slash_normalization(script, tmpdir):
"""
Test that presence or absence of final slash in VCS URL is normalized.
"""
result = script.pip(
script.pip(
'install', '-e',
'%s/#egg=ScriptTest' %
local_checkout(
'hg+https://bitbucket.org/ianb/scripttest',
tmpdir.join("cache"),
),
expect_error=True,
)
assert 'pip-log.txt' not in result.files_created, (
result.files_created['pip-log.txt'].bytes
)
@ -283,7 +279,7 @@ def test_vcs_url_urlquote_normalization(script, tmpdir):
"""
Test that urlquoted characters are normalized for repo URL comparison.
"""
result = script.pip(
script.pip(
'install', '-e',
'%s/#egg=django-wikiapp' %
local_checkout(
@ -291,10 +287,6 @@ def test_vcs_url_urlquote_normalization(script, tmpdir):
'/release-0.1',
tmpdir.join("cache"),
),
expect_error=True,
)
assert 'pip-log.txt' not in result.files_created, (
result.files_created['pip-log.txt'].bytes
)
@ -332,8 +324,7 @@ def test_install_from_local_directory_with_no_setup_py(script, data):
Test installing from a local directory with no 'setup.py'.
"""
result = script.pip('install', data.root, expect_error=True)
assert len(result.files_created) == 1, result.files_created
assert 'pip-log.txt' in result.files_created, result.files_created
assert not result.files_created
assert "is not installable. File 'setup.py' not found." in result.stdout
@ -342,8 +333,7 @@ def test_editable_install_from_local_directory_with_no_setup_py(script, data):
Test installing from a local directory with no 'setup.py'.
"""
result = script.pip('install', '-e', data.root, expect_error=True)
assert len(result.files_created) == 1, result.files_created
assert 'pip-log.txt' in result.files_created, result.files_created
assert not result.files_created
assert "is not installable. File 'setup.py' not found." in result.stdout

View File

@ -167,19 +167,6 @@ def _test_config_file_override_stack(script, virtualenv, config_file):
)
def test_log_file_no_directory():
    """Opening a log file given a bare filename (no directory part) works."""
    from pip.basecommand import open_logfile

    log_fp = open_logfile('testpip.log')
    log_fp.write('can write')
    log_fp.close()
    # The file must have been created on disk; remove it to keep the
    # working directory clean for other tests.
    assert os.path.exists(log_fp.name)
    os.remove(log_fp.name)
def test_options_from_venv_config(script, virtualenv):
"""
Test if ConfigOptionParser reads a virtualenv-local config file

View File

@ -197,7 +197,7 @@ def test_uninstall_rollback(script, data):
assert_all_changes(
result.files_after,
result2,
[script.venv / 'build', 'pip-log.txt'],
[script.venv / 'build'],
)

View File

@ -9,7 +9,7 @@ from mock import patch
from tests.lib import assert_all_changes, pyversion
from tests.lib.local_repos import local_repo, local_checkout
from pip.util import rmtree
from pip.utils import rmtree
def test_simple_uninstall(script):
@ -299,8 +299,7 @@ def test_uninstall_as_egg(script, data):
)
@patch('pip.req.req_uninstall.logger')
def test_uninstallpathset_no_paths(mock_logger):
def test_uninstallpathset_no_paths(caplog):
"""
Test UninstallPathSet logs notification when there are no paths to
uninstall
@ -313,13 +312,14 @@ def test_uninstallpathset_no_paths(mock_logger):
mock_dist_is_local.return_value = True
uninstall_set = UninstallPathSet(test_dist)
uninstall_set.remove() # with no files added to set
mock_logger.notify.assert_any_call(
"Can't uninstall 'pip'. No files were found to uninstall.",
assert (
"Can't uninstall 'pip'. No files were found to uninstall."
in caplog.text()
)
@patch('pip.req.req_uninstall.logger')
def test_uninstallpathset_non_local(mock_logger):
def test_uninstallpathset_non_local(caplog):
"""
Test UninstallPathSet logs notification and returns (with no exception)
when dist is non-local
@ -338,11 +338,12 @@ def test_uninstallpathset_non_local(mock_logger):
# with no files added to set; which is the case when trying to remove
# non-local dists
uninstall_set.remove()
mock_logger.notify.assert_any_call(
"Not uninstalling pip at %s, outside environment %s" %
(nonlocal_path, sys.prefix)
assert (
"Not uninstalling pip at %s, outside environment %s"
% (nonlocal_path, sys.prefix)
in caplog.text()
)
mock_logger.notify.mock_calls
def test_uninstall_wheel(script, data):

View File

@ -277,7 +277,6 @@ class PipTestEnvironment(scripttest.TestFileEnvironment):
if environ is None:
environ = os.environ.copy()
environ["PIP_LOG_FILE"] = base_path.join("pip-log.txt")
environ["PATH"] = Path.pathsep.join(
[self.bin_path] + [environ.get("PATH", [])],
)

View File

@ -5,7 +5,7 @@ import subprocess
from os.path import dirname, abspath
from pip.compat import urllib
from pip.util import rmtree
from pip.utils import rmtree
src_folder = dirname(dirname(abspath(__file__)))

View File

@ -90,3 +90,102 @@ class TestUserCacheDir:
'/etc/xdg/pip',
'/etc'
]
class TestUserDataDir:
    """Platform-specific behaviour of ``appdirs.user_data_dir``."""

    def test_user_data_dir_win_no_roaming(self, monkeypatch):
        # Record calls so we can verify which CSIDL folder was requested.
        @pretend.call_recorder
        def _get_win_folder(base):
            return "C:\\Users\\test\\AppData\\Local"

        monkeypatch.setattr(
            appdirs,
            "_get_win_folder",
            _get_win_folder,
            raising=False,
        )
        monkeypatch.setattr(appdirs, "WINDOWS", True)

        assert (appdirs.user_data_dir("pip").replace("/", "\\")
                == "C:\\Users\\test\\AppData\\Local\\pip")
        assert _get_win_folder.calls == [pretend.call("CSIDL_LOCAL_APPDATA")]

    def test_user_data_dir_win_yes_roaming(self, monkeypatch):
        @pretend.call_recorder
        def _get_win_folder(base):
            return "C:\\Users\\test\\AppData\\Roaming"

        monkeypatch.setattr(
            appdirs,
            "_get_win_folder",
            _get_win_folder,
            raising=False,
        )
        monkeypatch.setattr(appdirs, "WINDOWS", True)

        assert (appdirs.user_data_dir("pip", roaming=True).replace("/", "\\")
                == "C:\\Users\\test\\AppData\\Roaming\\pip")
        assert _get_win_folder.calls == [pretend.call("CSIDL_APPDATA")]

    def test_user_data_dir_osx(self, monkeypatch):
        monkeypatch.setenv("HOME", "/home/test")
        monkeypatch.setattr(sys, "platform", "darwin")

        assert (appdirs.user_data_dir("pip")
                == "/home/test/Library/Application Support/pip")

    def test_user_data_dir_linux(self, monkeypatch):
        # raising=False: don't error out when XDG_DATA_HOME isn't set on
        # the machine running the tests (a bare delenv raises KeyError).
        monkeypatch.delenv("XDG_DATA_HOME", raising=False)
        monkeypatch.setenv("HOME", "/home/test")
        monkeypatch.setattr(sys, "platform", "linux2")

        assert appdirs.user_data_dir("pip") == "/home/test/.local/share/pip"

    def test_user_data_dir_linux_override(self, monkeypatch):
        monkeypatch.setenv("XDG_DATA_HOME", "/home/test/.other-share")
        monkeypatch.setenv("HOME", "/home/test")
        monkeypatch.setattr(sys, "platform", "linux2")

        assert appdirs.user_data_dir("pip") == "/home/test/.other-share/pip"
class TestUserLogDir:
    """Platform-specific behaviour of ``appdirs.user_log_dir``."""

    def test_user_log_dir_win(self, monkeypatch):
        # Record calls so we can verify which CSIDL folder was requested.
        @pretend.call_recorder
        def _get_win_folder(base):
            return "C:\\Users\\test\\AppData\\Local"

        monkeypatch.setattr(
            appdirs,
            "_get_win_folder",
            _get_win_folder,
            raising=False,
        )
        monkeypatch.setattr(appdirs, "WINDOWS", True)

        assert (appdirs.user_log_dir("pip").replace("/", "\\")
                == "C:\\Users\\test\\AppData\\Local\\pip\\Logs")
        assert _get_win_folder.calls == [pretend.call("CSIDL_LOCAL_APPDATA")]

    def test_user_log_dir_osx(self, monkeypatch):
        monkeypatch.setenv("HOME", "/home/test")
        monkeypatch.setattr(sys, "platform", "darwin")

        assert (appdirs.user_log_dir("pip")
                == "/home/test/Library/Logs/pip")

    # Renamed from test_uuser_log_dir_linux (typo in the original name).
    def test_user_log_dir_linux(self, monkeypatch):
        # raising=False: don't error out when XDG_CACHE_HOME isn't set on
        # the machine running the tests (a bare delenv raises KeyError).
        monkeypatch.delenv("XDG_CACHE_HOME", raising=False)
        monkeypatch.setenv("HOME", "/home/test")
        monkeypatch.setattr(sys, "platform", "linux2")

        assert appdirs.user_log_dir("pip") == "/home/test/.cache/pip/log"

    def test_user_log_dir_linux_override(self, monkeypatch):
        monkeypatch.setenv("XDG_CACHE_HOME", "/home/test/.other-cache")
        monkeypatch.setenv("HOME", "/home/test")
        monkeypatch.setattr(sys, "platform", "linux2")

        assert appdirs.user_log_dir("pip") == "/home/test/.other-cache/pip/log"

View File

@ -1,6 +1,6 @@
import os
import logging
from pip.basecommand import Command
from pip.log import logger
class FakeCommand(Command):
@ -12,7 +12,7 @@ class FakeCommand(Command):
super(FakeCommand, self).__init__()
def run(self, options, args):
logger.info("fake")
logging.getLogger("pip.tests").info("fake")
if self.error:
raise SystemExit(1)
@ -22,8 +22,10 @@ class FakeCommandWithUnicode(FakeCommand):
summary = name
def run(self, options, args):
logger.info(b"bytes here \xE9")
logger.info(b"unicode here \xC3\xA9".decode('utf-8'))
logging.getLogger("pip.tests").info(b"bytes here \xE9")
logging.getLogger("pip.tests").info(
b"unicode here \xC3\xA9".decode("utf-8")
)
class Test_basecommand_logging(object):
@ -32,9 +34,6 @@ class Test_basecommand_logging(object):
options
"""
def teardown(self):
logger.consumers = []
def test_log_command_success(self, tmpdir):
"""
Test the --log option logs when command succeeds
@ -53,16 +52,6 @@ class Test_basecommand_logging(object):
cmd.main(['fake', '--log', log_path])
assert 'fake' == open(log_path).read().strip()[:4]
def test_log_file_command_success(self, tmpdir):
    """
    Test the --log-file option *doesn't* log when command succeeds.
    (It's just the historical behavior? this just confirms it)
    """
    cmd = FakeCommand()
    log_file_path = tmpdir.join('log_file')
    cmd.main(['fake', '--log-file', log_file_path])
    # --log-file only captures output on failure, so a successful run
    # must leave no file behind.
    assert not os.path.exists(log_file_path)
def test_log_file_command_error(self, tmpdir):
"""
Test the --log-file option logs (when there's an error).
@ -72,27 +61,6 @@ class Test_basecommand_logging(object):
cmd.main(['fake', '--log-file', log_file_path])
assert 'fake' == open(log_file_path).read().strip()[:4]
def test_log_log_file(self, tmpdir):
    """
    Test the --log and --log-file options log together (when there's an
    error).
    """
    # error=True makes the fake command exit non-zero, which is what
    # triggers --log-file to be written at all.
    cmd = FakeCommand(error=True)
    log_path = tmpdir.join('log')
    log_file_path = tmpdir.join('log_file')
    cmd.main(['fake', '--log', log_path, '--log-file', log_file_path])
    # Both destinations should have received the "fake" log record.
    assert 'fake' == open(log_path).read().strip()[:4]
    assert 'fake' == open(log_file_path).read().strip()[:4]
def test_verbose_quiet(self):
    """
    Test additive quality of -v and -q
    """
    cmd = FakeCommand()
    # One -v raises verbosity by one, each -q lowers it by one, so
    # '-vqq' nets out to one level quieter than the default.
    cmd.main(['fake', '-vqq'])
    # The first registered consumer holds the console level.
    console_level = logger.consumers[0][0]
    assert console_level == logger.WARN
def test_unicode_messages(self, tmpdir):
"""
Tests that logging bytestrings and unicode objects don't break logging

View File

@ -4,13 +4,12 @@ import pip.wheel
import pip.pep425tags
from pkg_resources import parse_version, Distribution
from pip.log import logger
from pip.req import InstallRequirement
from pip.index import PackageFinder, Link
from pip.exceptions import (
BestVersionAlreadyInstalled, DistributionNotFound, InstallationError,
)
from pip.util import Inf
from pip.utils import Inf
from pip.download import PipSession
from mock import Mock, patch
@ -96,15 +95,10 @@ def test_finder_detects_latest_already_satisfied_pypi_links():
class TestWheel:
def teardown(self):
logger.consumers = []
def test_skip_invalid_wheel_link(self, data):
def test_skip_invalid_wheel_link(self, caplog, data):
"""
Test if PackageFinder skips invalid wheel filenames
"""
log = []
logger.add_consumers((logger.DEBUG, log.append))
req = InstallRequirement.from_line("invalid")
# data.find_links contains "invalid.whl", which is an invalid wheel
finder = PackageFinder(
@ -115,7 +109,11 @@ class TestWheel:
)
with pytest.raises(DistributionNotFound):
finder.find_requirement(req, True)
"invalid.whl because the wheel filename is invalid" in "".join(log)
assert (
"invalid.whl because the wheel filename is invalid"
in caplog.text()
)
def test_not_find_wheel_not_supported(self, data, monkeypatch):
"""

View File

@ -1,125 +0,0 @@
from pip.compat import StringIO
from pip.log import should_color, should_warn, Logger
def test_should_color_std():
    # Consumer is not one of the known std streams -> never colorize.
    assert not should_color(object(), {}, std=[object()])


def test_should_color_isatty():
    # A std stream reporting isatty() == True should be colorized.
    class FakeTTY(object):
        def isatty(self):
            return True

    consumer = FakeTTY()
    assert should_color(consumer, {}, std=[consumer])


def test_should_color_environ():
    # TERM containing "ANSI" forces color even without a tty.
    consumer = object()
    assert should_color(consumer, {"TERM": "ANSI"}, std=[consumer])


def test_should_color_notty_environ():
    # std stream, but neither a tty nor an ANSI TERM -> no color.
    consumer = object()
    assert not should_color(consumer, {}, std=[consumer])


def test_should_warn_greater_one_minor():
    # Current version more than one minor release behind -> warn.
    assert should_warn("1.4", "1.6")


def test_should_warn_exactly_one_minor():
    # Exactly one minor release behind is still acceptable.
    assert not should_warn("1.5", "1.6")


def test_should_warn_equal():
    assert not should_warn("1.6", "1.6")


def test_should_warn_greater():
    # Being ahead of the latest release never warns.
    assert not should_warn("1.7", "1.6")


def test_should_warn_significance():
    # Dev/pre-release markers are ignored when comparing significance.
    assert should_warn("1.4.dev1", "1.6")
def test_ignores_duplicate_consumers():
    """Adding the same (level, consumer) pair repeatedly keeps one entry."""
    import sys

    log = Logger()
    stream = sys.stdout
    # Register the identical pair four times, spread over two calls;
    # the logger must deduplicate down to a single consumer.
    for _ in range(2):
        log.add_consumers(
            (log.NOTIFY, stream),
            (log.NOTIFY, stream),
        )
    assert len(log.consumers) == 1
def test_ignores_Win32_wrapped_consumers(monkeypatch):
    """
    Test that colorama wrapped duplicate streams are ignored, too.
    """
    logger = Logger()
    consumer = StringIO()
    # Same underlying stream registered under two names; on Windows the
    # logger wraps streams with colorama, which must not defeat dedup.
    consumer1 = consumer
    consumer2 = consumer
    # Pretend to be Windows
    monkeypatch.setattr('sys.platform', 'win32')
    logger.add_consumers(
        (logger.NOTIFY, consumer1),
        (logger.NOTIFY, consumer2),
    )
    # Pretend to be linux
    monkeypatch.setattr('sys.platform', 'linux2')
    logger.add_consumers(
        (logger.NOTIFY, consumer2),
        (logger.NOTIFY, consumer1),
    )
    # Wrapped-on-win32 and unwrapped-on-linux registrations of the same
    # stream must still collapse to a single consumer.
    assert 1 == len(logger.consumers)
def test_log_no_extra_line_break():
    """A single log call must emit exactly one line to every consumer."""
    streams = [StringIO(), StringIO()]
    log = Logger()
    log.add_consumers(*[(log.NOTIFY, stream) for stream in streams])
    log.notify("one line")
    # splitlines(True) keeps the terminators, so any stray blank
    # line-break would show up as an extra element.
    for stream in streams:
        assert len(stream.getvalue().splitlines(True)) == 1
def test_level_for_integer():
    """level_for_integer maps 0..6 in order and clamps out-of-range input."""
    log = Logger()
    expectations = [
        (-1000, log.VERBOSE_DEBUG),  # clamped low
        (0, log.VERBOSE_DEBUG),
        (1, log.DEBUG),
        (2, log.INFO),
        (3, log.NOTIFY),
        (4, log.WARN),
        (5, log.ERROR),
        (6, log.FATAL),
        (1000, log.FATAL),           # clamped high
    ]
    for value, level in expectations:
        assert log.level_for_integer(value) == level

View File

@ -89,10 +89,10 @@ class TestOptionPrecedence(object):
"""
os.environ['PIP_LOG_FILE'] = 'override.log'
options, args = main(['fake'])
assert options.log_file == 'override.log'
assert options.log == 'override.log'
os.environ['PIP_LOCAL_LOG'] = 'override.log'
options, args = main(['fake'])
assert options.log_file == 'override.log'
assert options.log == 'override.log'
def test_cli_override_environment(self):
"""
@ -210,7 +210,7 @@ class TestGeneralOptions(object):
def test_local_log(self):
options1, args1 = main(['--local-log', 'path', 'fake'])
options2, args2 = main(['fake', '--local-log', 'path'])
assert options1.log_file == options2.log_file == 'path'
assert options1.log == options2.log == 'path'
def test_no_input(self):
options1, args1 = main(['--no-input', 'fake'])

View File

@ -11,11 +11,10 @@ from pip.exceptions import (
from pip.download import PipSession
from pip._vendor import pkg_resources
from pip.index import PackageFinder
from pip.log import logger
from pip.req import (InstallRequirement, RequirementSet,
Requirements, parse_requirements)
from pip.req.req_install import parse_editable
from pip.util import read_text_file
from pip.utils import read_text_file
from tests.lib import assert_raises_regexp
@ -23,11 +22,9 @@ class TestRequirementSet(object):
"""RequirementSet tests"""
def setup(self):
logger.consumers = [(logger.NOTIFY, Mock())]
self.tempdir = tempfile.mkdtemp()
def teardown(self):
logger.consumers = []
shutil.rmtree(self.tempdir, ignore_errors=True)
def basic_reqset(self):
@ -66,7 +63,7 @@ def test_egg_info_data(file_contents, expected):
om = mock_open(read_data=file_contents)
em = Mock()
em.return_value = 'cp1252'
with patch('pip.util.open', om, create=True):
with patch('pip.utils.open', om, create=True):
with patch('locale.getpreferredencoding', em):
ret = read_text_file('foo')
assert ret == expected.decode('utf-8')

View File

@ -12,8 +12,8 @@ import pytest
from mock import Mock, patch
from pip.exceptions import BadCommand
from pip.util import (egg_link_path, Inf, get_installed_distributions,
find_command, untar_file, unzip_file)
from pip.utils import (egg_link_path, Inf, get_installed_distributions,
find_command, untar_file, unzip_file)
from pip.commands.freeze import freeze_excludes
@ -37,26 +37,26 @@ class Tests_EgglinkPath:
)
# patches
from pip import util
self.old_site_packages = util.site_packages
self.mock_site_packages = util.site_packages = 'SITE_PACKAGES'
self.old_running_under_virtualenv = util.running_under_virtualenv
self.mock_running_under_virtualenv = util.running_under_virtualenv = \
from pip import utils
self.old_site_packages = utils.site_packages
self.mock_site_packages = utils.site_packages = 'SITE_PACKAGES'
self.old_running_under_virtualenv = utils.running_under_virtualenv
self.mock_running_under_virtualenv = utils.running_under_virtualenv = \
Mock()
self.old_virtualenv_no_global = util.virtualenv_no_global
self.mock_virtualenv_no_global = util.virtualenv_no_global = Mock()
self.old_user_site = util.user_site
self.mock_user_site = util.user_site = self.user_site
self.old_virtualenv_no_global = utils.virtualenv_no_global
self.mock_virtualenv_no_global = utils.virtualenv_no_global = Mock()
self.old_user_site = utils.user_site
self.mock_user_site = utils.user_site = self.user_site
from os import path
self.old_isfile = path.isfile
self.mock_isfile = path.isfile = Mock()
def teardown(self):
from pip import util
util.site_packages = self.old_site_packages
util.running_under_virtualenv = self.old_running_under_virtualenv
util.virtualenv_no_global = self.old_virtualenv_no_global
util.user_site = self.old_user_site
from pip import utils
utils.site_packages = self.old_site_packages
utils.running_under_virtualenv = self.old_running_under_virtualenv
utils.virtualenv_no_global = self.old_virtualenv_no_global
utils.user_site = self.old_user_site
from os import path
path.isfile = self.old_isfile
@ -161,8 +161,8 @@ def test_Inf_equals_Inf():
assert Inf == Inf
@patch('pip.util.dist_is_local')
@patch('pip.util.dist_is_editable')
@patch('pip.utils.dist_is_local')
@patch('pip.utils.dist_is_editable')
class Tests_get_installed_distributions:
"""test util.get_installed_distributions"""
@ -274,7 +274,7 @@ def test_does_not_find_command_because_there_is_no_path():
@patch('os.pathsep', ':')
@patch('pip.util.get_pathext')
@patch('pip.utils.get_pathext')
@patch('os.path.isfile')
def test_find_command_trys_all_pathext(mock_isfile, getpath_mock):
"""
@ -301,7 +301,7 @@ def test_find_command_trys_all_pathext(mock_isfile, getpath_mock):
@patch('os.pathsep', ':')
@patch('pip.util.get_pathext')
@patch('pip.utils.get_pathext')
@patch('os.path.isfile')
def test_find_command_trys_supplied_pathext(mock_isfile, getpath_mock):
"""

View File

@ -7,7 +7,7 @@ from mock import patch, Mock
from pip._vendor import pkg_resources
from pip import pep425tags, wheel
from pip.exceptions import InvalidWheelFilename, UnsupportedWheel
from pip.util import unpack_file
from pip.utils import unpack_file
def test_get_entrypoints(tmpdir):
@ -255,7 +255,7 @@ class TestWheelFile(object):
assert w.support_index_min(tags=[]) is None
def test_unpack_wheel_no_flatten(self):
from pip import util
from pip import utils
from tempfile import mkdtemp
from shutil import rmtree
@ -264,7 +264,7 @@ class TestWheelFile(object):
pytest.skip("%s does not exist" % filepath)
try:
tmpdir = mkdtemp()
util.unpack_file(filepath, tmpdir, 'application/zip', None)
utils.unpack_file(filepath, tmpdir, 'application/zip', None)
assert os.path.isdir(os.path.join(tmpdir, 'meta-1.0.dist-info'))
finally:
rmtree(tmpdir)
@ -371,24 +371,20 @@ class TestMoveWheelFiles(object):
class TestWheelBuilder(object):
@patch('pip.log.Logger.log')
@patch('pip.wheel.WheelBuilder._build_one')
def test_skip_building_wheels(self, mock_build_one, mock_log):
wheel_req = Mock(is_wheel=True, editable=False)
reqset = Mock(requirements=Mock(values=lambda: [wheel_req]))
wb = wheel.WheelBuilder(reqset, Mock(), '/wheel/dir')
wb.build()
name, args, kwargs = mock_log.mock_calls[0]
assert "due to already being wheel" in args[1]
assert mock_build_one.mock_calls == []
def test_skip_building_wheels(self, caplog):
with patch('pip.wheel.WheelBuilder._build_one') as mock_build_one:
wheel_req = Mock(is_wheel=True, editable=False)
reqset = Mock(requirements=Mock(values=lambda: [wheel_req]))
wb = wheel.WheelBuilder(reqset, Mock(), '/wheel/dir')
wb.build()
assert "due to already being wheel" in caplog.text()
assert mock_build_one.mock_calls == []
@patch('pip.log.Logger.log')
@patch('pip.wheel.WheelBuilder._build_one')
def test_skip_building_editables(self, mock_build_one, mock_log):
editable_req = Mock(editable=True, is_wheel=False)
reqset = Mock(requirements=Mock(values=lambda: [editable_req]))
wb = wheel.WheelBuilder(reqset, Mock(), '/wheel/dir')
wb.build()
name, args, kwargs = mock_log.mock_calls[0]
assert "due to being editable" in args[1]
assert mock_build_one.mock_calls == []
def test_skip_building_editables(self, caplog):
with patch('pip.wheel.WheelBuilder._build_one') as mock_build_one:
editable_req = Mock(editable=True, is_wheel=False)
reqset = Mock(requirements=Mock(values=lambda: [editable_req]))
wb = wheel.WheelBuilder(reqset, Mock(), '/wheel/dir')
wb.build()
assert "due to being editable" in caplog.text()
assert mock_build_one.mock_calls == []

View File

@ -6,6 +6,7 @@ envlist =
deps =
pretend
pytest
pytest-capturelog
pytest-cov
pytest-timeout
pytest-xdist