diff --git a/pip/__init__.py b/pip/__init__.py
index dcebe3559..f8e610c93 100755
--- a/pip/__init__.py
+++ b/pip/__init__.py
@@ -115,7 +115,7 @@ def create_main_parser():
gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser)
parser.add_option_group(gen_opts)
- parser.main = True # so the help formatter knows
+ parser.main = True # so the help formatter knows
# create command listing for description
command_summaries = get_summaries()
@@ -128,8 +128,8 @@ def create_main_parser():
def parseopts(args):
parser = create_main_parser()
- # Note: parser calls disable_interspersed_args(), so the result of this call
- # is to split the initial args into the general options before the
+ # Note: parser calls disable_interspersed_args(), so the result of this
+ # call is to split the initial args into the general options before the
# subcommand and everything else.
# For example:
# args: ['--timeout=5', 'install', '--user', 'INITools']
@@ -191,6 +191,8 @@ def bootstrap():
pkgs = ['pip']
try:
import setuptools
+ # Dumb hack
+ setuptools
except ImportError:
pkgs.append('setuptools')
return main(['install', '--upgrade'] + pkgs + sys.argv[1:])
@@ -220,11 +222,18 @@ class FrozenRequirement(object):
try:
req = get_src_requirement(dist, location, find_tags)
except InstallationError as exc:
- logger.warn("Error when trying to get requirement for VCS system %s, falling back to uneditable format" % exc)
+ logger.warn(
+ "Error when trying to get requirement for VCS system %s, "
+ "falling back to uneditable format" % exc
+ )
req = None
if req is None:
- logger.warn('Could not determine repository location of %s' % location)
- comments.append('## !! Could not determine repository location')
+ logger.warn(
+ 'Could not determine repository location of %s' % location
+ )
+ comments.append(
+ '## !! Could not determine repository location'
+ )
req = dist.as_requirement()
editable = False
else:
@@ -243,15 +252,25 @@ class FrozenRequirement(object):
if not svn_location:
logger.warn(
'Warning: cannot find svn location for %s' % req)
- comments.append('## FIXME: could not find svn URL in dependency_links for this package:')
+ comments.append(
+ '## FIXME: could not find svn URL in dependency_links '
+ 'for this package:'
+ )
else:
- comments.append('# Installing as editable to satisfy requirement %s:' % req)
+ comments.append(
+ '# Installing as editable to satisfy requirement %s:' %
+ req
+ )
if ver_match:
rev = ver_match.group(1)
else:
rev = '{%s}' % date_match.group(1)
editable = True
- req = '%s@%s#egg=%s' % (svn_location, rev, cls.egg_name(dist))
+ req = '%s@%s#egg=%s' % (
+ svn_location,
+ rev,
+ cls.egg_name(dist)
+ )
return cls(dist.project_name, req, editable, comments)
@staticmethod
diff --git a/pip/backwardcompat/__init__.py b/pip/backwardcompat/__init__.py
index c327bbeec..8f1248893 100644
--- a/pip/backwardcompat/__init__.py
+++ b/pip/backwardcompat/__init__.py
@@ -1,6 +1,8 @@
"""Stuff that differs in different Python versions and platform
distributions."""
+# flake8: noqa
+
import os
import imp
import sys
@@ -10,6 +12,7 @@ __all__ = ['WindowsError']
uses_pycache = hasattr(imp, 'cache_from_source')
+
class NeverUsedException(Exception):
"""this exception should never be raised"""
@@ -116,10 +119,11 @@ def get_path_uid(path):
"""
Return path's uid.
- Does not follow symlinks: https://github.com/pypa/pip/pull/935#discussion_r5307003
+ Does not follow symlinks:
+ https://github.com/pypa/pip/pull/935#discussion_r5307003
- Placed this function in backwardcompat due to differences on AIX and Jython,
- that should eventually go away.
+ Placed this function in backwardcompat due to differences on AIX and
+ Jython, that should eventually go away.
:raises OSError: When path is a symlink or can't be read.
"""
@@ -134,5 +138,7 @@ def get_path_uid(path):
file_uid = os.stat(path).st_uid
else:
# raise OSError for parity with os.O_NOFOLLOW above
- raise OSError("%s is a symlink; Will not return uid for symlinks" % path)
+ raise OSError(
+ "%s is a symlink; Will not return uid for symlinks" % path
+ )
return file_uid
diff --git a/pip/basecommand.py b/pip/basecommand.py
index d7a6de533..ae86356b0 100644
--- a/pip/basecommand.py
+++ b/pip/basecommand.py
@@ -15,8 +15,10 @@ from pip.exceptions import (BadCommand, InstallationError, UninstallationError,
CommandError, PreviousBuildDirError)
from pip.backwardcompat import StringIO
from pip.baseparser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
-from pip.status_codes import (SUCCESS, ERROR, UNKNOWN_ERROR, VIRTUALENV_NOT_FOUND,
- PREVIOUS_BUILD_DIR_ERROR)
+from pip.status_codes import (
+ SUCCESS, ERROR, UNKNOWN_ERROR, VIRTUALENV_NOT_FOUND,
+ PREVIOUS_BUILD_DIR_ERROR,
+)
from pip.util import get_prog
@@ -45,7 +47,10 @@ class Command(object):
self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)
# Add the general options
- gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, self.parser)
+ gen_opts = cmdoptions.make_option_group(
+ cmdoptions.general_group,
+ self.parser,
+ )
self.parser.add_option_group(gen_opts)
def _build_session(self, options):
@@ -107,7 +112,9 @@ class Command(object):
if options.require_venv:
# If a venv is required check if it can really be found
if not running_under_virtualenv():
- logger.fatal('Could not find an activated virtualenv (required).')
+ logger.fatal(
+ 'Could not find an activated virtualenv (required).'
+ )
sys.exit(VIRTUALENV_NOT_FOUND)
if options.log:
diff --git a/pip/baseparser.py b/pip/baseparser.py
index 98a4f3832..6ff81b2d6 100644
--- a/pip/baseparser.py
+++ b/pip/baseparser.py
@@ -7,9 +7,10 @@ import textwrap
from distutils.util import strtobool
from pip.backwardcompat import ConfigParser, string_types
-from pip.locations import default_config_file, default_config_basename, running_under_virtualenv
-from pip.util import get_terminal_size, get_prog
-from pip._vendor import pkg_resources
+from pip.locations import (
+ default_config_file, default_config_basename, running_under_virtualenv,
+)
+from pip.util import get_terminal_size
class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
@@ -141,7 +142,10 @@ class ConfigOptionParser(CustomOptionParser):
else:
files = [default_config_file]
if running_under_virtualenv():
- venv_config_file = os.path.join(sys.prefix, default_config_basename)
+ venv_config_file = os.path.join(
+ sys.prefix,
+ default_config_basename,
+ )
if os.path.exists(venv_config_file):
files.append(venv_config_file)
return files
@@ -161,7 +165,9 @@ class ConfigOptionParser(CustomOptionParser):
config = {}
# 1. config files
for section in ('global', self.name):
- config.update(self.normalize_keys(self.get_config_section(section)))
+ config.update(
+ self.normalize_keys(self.get_config_section(section))
+ )
# 2. environmental variables
config.update(self.normalize_keys(self.get_environ_vars()))
# Then set the options with those values
diff --git a/pip/cmdoptions.py b/pip/cmdoptions.py
index 8ed3d91fc..703f731b0 100644
--- a/pip/cmdoptions.py
+++ b/pip/cmdoptions.py
@@ -1,10 +1,10 @@
"""
shared options and groups
-The principle here is to define options once, but *not* instantiate them globally.
-One reason being that options with action='append' can carry state between parses.
-pip parse's general options twice internally, and shouldn't pass on state.
-To be consistent, all options will follow this design.
+The principle here is to define options once, but *not* instantiate them
+globally. One reason being that options with action='append' can carry state
+between parses. pip parse's general options twice internally, and shouldn't
+pass on state. To be consistent, all options will follow this design.
"""
import copy
@@ -23,12 +23,15 @@ def make_option_group(group, parser):
option_group.add_option(option.make())
return option_group
+
class OptionMaker(object):
"""Class that stores the args/kwargs that would be used to make an Option,
for making them later, and uses deepcopy's to reset state."""
+
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
+
def make(self):
args_copy = copy.deepcopy(self.args)
kwargs_copy = copy.deepcopy(self.kwargs)
@@ -57,7 +60,8 @@ verbose = OptionMaker(
dest='verbose',
action='count',
default=0,
- help='Give more output. Option is additive, and can be used up to 3 times.')
+ help='Give more output. Option is additive, and can be used up to 3 times.'
+)
version = OptionMaker(
'-V', '--version',
@@ -92,7 +96,8 @@ log_file = OptionMaker(
dest='log_file',
metavar='path',
default=default_log_file,
- help='Path to a verbose non-appending log, that only logs failures. This log is active by default at %default.')
+ help='Path to a verbose non-appending log, that only logs failures. This '
+ 'log is active by default at %default.')
no_input = OptionMaker(
# Don't ask for input
@@ -151,7 +156,7 @@ cert = OptionMaker(
type='str',
default='',
metavar='path',
- help = "Path to alternate CA bundle.")
+ help="Path to alternate CA bundle.")
index_url = OptionMaker(
'-i', '--index-url', '--pypi-url',
@@ -175,13 +180,15 @@ no_index = OptionMaker(
default=False,
help='Ignore package index (only looking at --find-links URLs instead).')
-find_links = OptionMaker(
+find_links = OptionMaker(
'-f', '--find-links',
dest='find_links',
action='append',
default=[],
metavar='url',
- help="If a url or path to an html file, then parse for links to archives. If a local path or file:// url that's a directory, then look for archives in the directory listing.")
+ help="If a url or path to an html file, then parse for links to archives. "
+ "If a local path or file:// url that's a directory, then look for "
+ "archives in the directory listing.")
# TODO: Remove after 1.6
use_mirrors = OptionMaker(
@@ -308,9 +315,10 @@ install_options = OptionMaker(
action='append',
metavar='options',
help="Extra arguments to be supplied to the setup.py install "
- "command (use like --install-option=\"--install-scripts=/usr/local/bin\"). "
- "Use multiple --install-option options to pass multiple options to setup.py install. "
- "If you are using an option with a directory path, be sure to use absolute path.")
+ "command (use like --install-option=\"--install-scripts=/usr/local/"
+ "bin\"). Use multiple --install-option options to pass multiple "
+ "options to setup.py install. If you are using an option with a "
+ "directory path, be sure to use absolute path.")
global_options = OptionMaker(
'--global-option',
@@ -318,7 +326,7 @@ global_options = OptionMaker(
action='append',
metavar='options',
help="Extra global options to be supplied to the setup.py "
- "call before the install command.")
+ "call before the install command.")
no_clean = OptionMaker(
'--no-clean',
diff --git a/pip/commands/bundle.py b/pip/commands/bundle.py
index 69967fe51..52b4f1941 100644
--- a/pip/commands/bundle.py
+++ b/pip/commands/bundle.py
@@ -1,4 +1,3 @@
-import textwrap
from pip.locations import build_prefix, src_prefix
from pip.util import display_path, backup_dir
from pip.log import logger
@@ -22,21 +21,30 @@ class BundleCommand(InstallCommand):
src_opt = self.parser.get_option("--src")
src_opt.default = backup_dir(src_prefix, '-bundle')
self.parser.set_defaults(**{
- src_opt.dest: src_opt.default,
- build_opt.dest: build_opt.default,
- })
+ src_opt.dest: src_opt.default,
+ build_opt.dest: build_opt.default,
+ })
def run(self, options, args):
- logger.deprecated('1.6', "DEPRECATION: 'pip bundle' and support for installing from *.pybundle files is deprecated. "
- "See https://github.com/pypa/pip/pull/1046")
+ logger.deprecated(
+ '1.6',
+ "DEPRECATION: 'pip bundle' and support for installing from "
+ "*.pybundle files is deprecated. "
+ "See https://github.com/pypa/pip/pull/1046"
+ )
if not args:
raise InstallationError('You must give a bundle filename')
# We have to get everything when creating a bundle:
options.ignore_installed = True
- logger.notify('Putting temporary build files in %s and source/develop files in %s'
- % (display_path(options.build_dir), display_path(options.src_dir)))
+ logger.notify(
+ 'Putting temporary build files in %s and source/develop files in '
+ '%s' % (
+ display_path(options.build_dir),
+ display_path(options.src_dir)
+ )
+ )
self.bundle_filename = args.pop(0)
requirement_set = super(BundleCommand, self).run(options, args)
return requirement_set
diff --git a/pip/commands/completion.py b/pip/commands/completion.py
index 5fa237620..a3fa2fb2d 100644
--- a/pip/commands/completion.py
+++ b/pip/commands/completion.py
@@ -56,4 +56,6 @@ class CompletionCommand(Command):
script = COMPLETION_SCRIPTS.get(options.shell, '')
print(BASE_COMPLETION % {'script': script, 'shell': options.shell})
else:
- sys.stderr.write('ERROR: You must pass %s\n' % ' or '.join(shell_options))
+ sys.stderr.write(
+ 'ERROR: You must pass %s\n' % ' or '.join(shell_options)
+ )
diff --git a/pip/commands/freeze.py b/pip/commands/freeze.py
index 930de62a5..e53455828 100644
--- a/pip/commands/freeze.py
+++ b/pip/commands/freeze.py
@@ -25,20 +25,23 @@ class FreezeCommand(Command):
action='store',
default=None,
metavar='file',
- help="Use the order in the given requirements file and it's comments when generating output.")
+ help="Use the order in the given requirements file and it's "
+ "comments when generating output.")
self.cmd_opts.add_option(
'-f', '--find-links',
dest='find_links',
action='append',
default=[],
metavar='URL',
- help='URL for finding packages, which will be added to the output.')
+ help='URL for finding packages, which will be added to the '
+ 'output.')
self.cmd_opts.add_option(
'-l', '--local',
dest='local',
action='store_true',
default=False,
- help='If in a virtualenv that has global access, do not output globally-installed packages.')
+ help='If in a virtualenv that has global access, do not output '
+ 'globally-installed packages.')
self.parser.insert_option_group(0, self.cmd_opts)
@@ -63,7 +66,9 @@ class FreezeCommand(Command):
for dist in pkg_resources.working_set:
if dist.has_metadata('dependency_links.txt'):
- dependency_links.extend(dist.get_metadata_lines('dependency_links.txt'))
+ dependency_links.extend(
+ dist.get_metadata_lines('dependency_links.txt')
+ )
for link in find_links:
if '#egg=' in link:
dependency_links.append(link)
@@ -71,7 +76,11 @@ class FreezeCommand(Command):
f.write('-f %s\n' % link)
installations = {}
for dist in get_installed_distributions(local_only=local_only):
- req = pip.FrozenRequirement.from_dist(dist, dependency_links, find_tags=find_tags)
+ req = pip.FrozenRequirement.from_dist(
+ dist,
+ dependency_links,
+ find_tags=find_tags,
+ )
installations[req.name] = req
if requirement:
req_f = open(requirement)
@@ -87,28 +96,44 @@ class FreezeCommand(Command):
line = line[2:].strip()
else:
line = line[len('--editable'):].strip().lstrip('=')
- line_req = InstallRequirement.from_editable(line, default_vcs=options.default_vcs)
- elif (line.startswith('-r') or line.startswith('--requirement')
- or line.startswith('-Z') or line.startswith('--always-unzip')
- or line.startswith('-f') or line.startswith('-i')
- or line.startswith('--extra-index-url')
- or line.startswith('--find-links')
- or line.startswith('--index-url')):
+ line_req = InstallRequirement.from_editable(
+ line,
+ default_vcs=options.default_vcs
+ )
+ elif (line.startswith('-r')
+ or line.startswith('--requirement')
+ or line.startswith('-Z')
+ or line.startswith('--always-unzip')
+ or line.startswith('-f')
+ or line.startswith('-i')
+ or line.startswith('--extra-index-url')
+ or line.startswith('--find-links')
+ or line.startswith('--index-url')):
f.write(line)
continue
else:
line_req = InstallRequirement.from_line(line)
if not line_req.name:
- logger.notify("Skipping line because it's not clear what it would install: %s"
- % line.strip())
- logger.notify(" (add #egg=PackageName to the URL to avoid this warning)")
+ logger.notify(
+ "Skipping line because it's not clear what it would "
+ "install: %s" % line.strip()
+ )
+ logger.notify(
+ " (add #egg=PackageName to the URL to avoid"
+ " this warning)"
+ )
continue
if line_req.name not in installations:
- logger.warn("Requirement file contains %s, but that package is not installed"
- % line.strip())
+ logger.warn(
+ "Requirement file contains %s, but that package is not"
+ " installed" % line.strip()
+ )
continue
f.write(str(installations[line_req.name]))
del installations[line_req.name]
- f.write('## The following requirements were added by pip --freeze:\n')
- for installation in sorted(installations.values(), key=lambda x: x.name):
+ f.write(
+ '## The following requirements were added by pip --freeze:\n'
+ )
+ for installation in sorted(
+ installations.values(), key=lambda x: x.name):
f.write(str(installation))
diff --git a/pip/commands/install.py b/pip/commands/install.py
index f799366d9..25ab1d414 100644
--- a/pip/commands/install.py
+++ b/pip/commands/install.py
@@ -1,13 +1,15 @@
import os
-import sys
import tempfile
import shutil
+
from pip.req import InstallRequirement, RequirementSet, parse_requirements
from pip.log import logger
from pip.locations import src_prefix, virtualenv_no_global, distutils_scheme
from pip.basecommand import Command
from pip.index import PackageFinder
-from pip.exceptions import InstallationError, CommandError, PreviousBuildDirError
+from pip.exceptions import (
+ InstallationError, CommandError, PreviousBuildDirError,
+)
from pip import cmdoptions
@@ -49,7 +51,9 @@ class InstallCommand(Command):
action='append',
default=[],
metavar='path/url',
- help='Install a project in editable mode (i.e. setuptools "develop mode") from a local project path or a VCS url.')
+ help=('Install a project in editable mode (i.e. setuptools '
+ '"develop mode") from a local project path or a VCS url.'),
+ )
cmd_opts.add_option(cmdoptions.requirements.make())
cmd_opts.add_option(cmdoptions.build_dir.make())
@@ -66,7 +70,9 @@ class InstallCommand(Command):
dest='download_dir',
metavar='dir',
default=None,
- help="Download packages into
instead of installing them, regardless of what's already installed.")
+ help=("Download packages into instead of installing them, "
+ "regardless of what's already installed."),
+ )
cmd_opts.add_option(cmdoptions.download_cache.make())
@@ -84,7 +90,9 @@ class InstallCommand(Command):
dest='upgrade',
action='store_true',
help='Upgrade all packages to the newest available version. '
- 'This process is recursive regardless of whether a dependency is already satisfied.')
+ 'This process is recursive regardless of whether a dependency'
+ ' is already satisfied.'
+ )
cmd_opts.add_option(
'--force-reinstall',
@@ -105,14 +113,17 @@ class InstallCommand(Command):
'--no-install',
dest='no_install',
action='store_true',
- help="DEPRECATED. Download and unpack all packages, but don't actually install them.")
+ help="DEPRECATED. Download and unpack all packages, but don't "
+ "actually install them."
+ )
cmd_opts.add_option(
'--no-download',
dest='no_download',
action="store_true",
- help="DEPRECATED. Don't download any packages, just install the ones already downloaded "
- "(completes an install run with --no-install).")
+ help="DEPRECATED. Don't download any packages, just install the "
+ "ones already downloaded (completes an install run with "
+ "--no-install).")
cmd_opts.add_option(cmdoptions.install_options.make())
cmd_opts.add_option(cmdoptions.global_options.make())
@@ -127,14 +138,18 @@ class InstallCommand(Command):
'--egg',
dest='as_egg',
action='store_true',
- help="Install packages as eggs, not 'flat', like pip normally does. This option is not about installing *from* eggs. (WARNING: Because this option overrides pip's normal install logic, requirements files may not behave as expected.)")
+ help="Install packages as eggs, not 'flat', like pip normally "
+ "does. This option is not about installing *from* eggs. "
+ "(WARNING: Because this option overrides pip's normal install"
+ " logic, requirements files may not behave as expected.)")
cmd_opts.add_option(
'--root',
dest='root_path',
metavar='dir',
default=None,
- help="Install everything relative to this alternate root directory.")
+ help="Install everything relative to this alternate root "
+ "directory.")
cmd_opts.add_option(
"--compile",
@@ -158,11 +173,15 @@ class InstallCommand(Command):
'--pre',
action='store_true',
default=False,
- help="Include pre-release and development versions. By default, pip only finds stable versions.")
+ help="Include pre-release and development versions. By default, "
+ "pip only finds stable versions.")
cmd_opts.add_option(cmdoptions.no_clean.make())
- index_opts = cmdoptions.make_option_group(cmdoptions.index_group, self.parser)
+ index_opts = cmdoptions.make_option_group(
+ cmdoptions.index_group,
+ self.parser,
+ )
self.parser.insert_option_group(0, index_opts)
self.parser.insert_option_group(0, cmd_opts)
@@ -173,22 +192,26 @@ class InstallCommand(Command):
This method is meant to be overridden by subclasses, not
called directly.
"""
- return PackageFinder(find_links=options.find_links,
- index_urls=index_urls,
- use_wheel=options.use_wheel,
- allow_external=options.allow_external,
- allow_unverified=options.allow_unverified,
- allow_all_external=options.allow_all_external,
- allow_all_prereleases=options.pre,
- process_dependency_links=
- options.process_dependency_links,
- session=session,
- )
+ return PackageFinder(
+ find_links=options.find_links,
+ index_urls=index_urls,
+ use_wheel=options.use_wheel,
+ allow_external=options.allow_external,
+ allow_unverified=options.allow_unverified,
+ allow_all_external=options.allow_all_external,
+ allow_all_prereleases=options.pre,
+ process_dependency_links=options.process_dependency_links,
+ session=session,
+ )
def run(self, options, args):
if options.no_install or options.no_download:
- logger.deprecated('1.7', "DEPRECATION: '--no-install' and '--no-download` are deprecated. See https://github.com/pypa/pip/issues/906.")
+ logger.deprecated(
+ '1.7',
+ "DEPRECATION: '--no-install' and '--no-download` are "
+ "deprecated. See https://github.com/pypa/pip/issues/906."
+ )
if options.download_dir:
options.no_install = True
@@ -198,7 +221,10 @@ class InstallCommand(Command):
install_options = options.install_options or []
if options.use_user_site:
if virtualenv_no_global():
- raise InstallationError("Can not perform a '--user' install. User site-packages are not visible in this virtualenv.")
+ raise InstallationError(
+ "Can not perform a '--user' install. User site-packages "
+ "are not visible in this virtualenv."
+ )
install_options.append('--user')
temp_target_dir = None
@@ -206,8 +232,12 @@ class InstallCommand(Command):
options.ignore_installed = True
temp_target_dir = tempfile.mkdtemp()
options.target_dir = os.path.abspath(options.target_dir)
- if os.path.exists(options.target_dir) and not os.path.isdir(options.target_dir):
- raise CommandError("Target path exists but is not a directory, will not continue.")
+ if (os.path.exists(options.target_dir)
+ and not os.path.isdir(options.target_dir)):
+ raise CommandError(
+ "Target path exists but is not a directory, will not "
+ "continue."
+ )
install_options.append('--home=' + temp_target_dir)
global_options = options.global_options or []
@@ -217,16 +247,20 @@ class InstallCommand(Command):
index_urls = []
if options.use_mirrors:
- logger.deprecated("1.7",
- "--use-mirrors has been deprecated and will be removed"
- " in the future. Explicit uses of --index-url and/or "
- "--extra-index-url is suggested.")
+ logger.deprecated(
+ "1.7",
+ "--use-mirrors has been deprecated and will be removed"
+ " in the future. Explicit uses of --index-url and/or "
+ "--extra-index-url is suggested."
+ )
if options.mirrors:
- logger.deprecated("1.7",
- "--mirrors has been deprecated and will be removed in "
- " the future. Explicit uses of --index-url and/or "
- "--extra-index-url is suggested.")
+ logger.deprecated(
+ "1.7",
+ "--mirrors has been deprecated and will be removed in "
+ " the future. Explicit uses of --index-url and/or "
+ "--extra-index-url is suggested."
+ )
index_urls += options.mirrors
session = self._build_session(options)
@@ -253,9 +287,14 @@ class InstallCommand(Command):
InstallRequirement.from_line(name, None))
for name in options.editables:
requirement_set.add_requirement(
- InstallRequirement.from_editable(name, default_vcs=options.default_vcs))
+ InstallRequirement.from_editable(
+ name,
+ default_vcs=options.default_vcs
+ )
+ )
for filename in options.requirements:
- for req in parse_requirements(filename, finder=finder, options=options, session=session):
+ for req in parse_requirements(
+ filename, finder=finder, options=options, session=session):
requirement_set.add_requirement(req)
if not requirement_set.has_requirements:
opts = {'name': self.name}
@@ -271,19 +310,28 @@ class InstallCommand(Command):
try:
if not options.no_download:
- requirement_set.prepare_files(finder, force_root_egg_info=self.bundle, bundle=self.bundle)
+ requirement_set.prepare_files(
+ finder,
+ force_root_egg_info=self.bundle,
+ bundle=self.bundle,
+ )
else:
requirement_set.locate_files()
if not options.no_install and not self.bundle:
- requirement_set.install(install_options, global_options, root=options.root_path)
+ requirement_set.install(
+ install_options,
+ global_options,
+ root=options.root_path,
+ )
installed = ' '.join([req.name for req in
requirement_set.successfully_installed])
if installed:
logger.notify('Successfully installed %s' % installed)
elif not self.bundle:
- downloaded = ' '.join([req.name for req in
- requirement_set.successfully_downloaded])
+ downloaded = ' '.join([
+ req.name for req in requirement_set.successfully_downloaded
+ ])
if downloaded:
logger.notify('Successfully downloaded %s' % downloaded)
elif self.bundle:
@@ -294,7 +342,8 @@ class InstallCommand(Command):
raise
finally:
# Clean up
- if (not options.no_clean) and ((not options.no_install) or options.download_dir):
+ if ((not options.no_clean)
+ and ((not options.no_install) or options.download_dir)):
requirement_set.cleanup_files(bundle=self.bundle)
if options.target_dir:
diff --git a/pip/commands/list.py b/pip/commands/list.py
index 2f59d8df5..15e2a3ac1 100644
--- a/pip/commands/list.py
+++ b/pip/commands/list.py
@@ -14,7 +14,8 @@ class ListCommand(Command):
%prog [options]"""
summary = 'List installed packages.'
- # distributions to skip (python itself is reported by pkg_resources.working_set)
+ # distributions to skip (python itself is reported by
+ # pkg_resources.working_set)
skip = ['python', 'wsgiref']
def __init__(self, *args, **kw):
@@ -41,13 +42,17 @@ class ListCommand(Command):
'-l', '--local',
action='store_true',
default=False,
- help='If in a virtualenv that has global access, do not list globally-installed packages.')
+ help=('If in a virtualenv that has global access, do not list '
+ 'globally-installed packages.'),
+ )
cmd_opts.add_option(
'--pre',
action='store_true',
default=False,
- help="Include pre-release and development versions. By default, pip only finds stable versions.")
+ help=("Include pre-release and development versions. By default, "
+ "pip only finds stable versions."),
+ )
index_opts = make_option_group(index_group, self.parser)
@@ -58,16 +63,16 @@ class ListCommand(Command):
"""
Create a package finder appropriate to this list command.
"""
- return PackageFinder(find_links=options.find_links,
- index_urls=index_urls,
- allow_external=options.allow_external,
- allow_unverified=options.allow_unverified,
- allow_all_external=options.allow_all_external,
- allow_all_prereleases=options.pre,
- process_dependency_links=
- options.process_dependency_links,
- session=session,
- )
+ return PackageFinder(
+ find_links=options.find_links,
+ index_urls=index_urls,
+ allow_external=options.allow_external,
+ allow_unverified=options.allow_unverified,
+ allow_all_external=options.allow_all_external,
+ allow_all_prereleases=options.pre,
+ process_dependency_links=options.process_dependency_links,
+ session=session,
+ )
def run(self, options, args):
if options.outdated:
@@ -80,10 +85,13 @@ class ListCommand(Command):
self.run_listing(options)
def run_outdated(self, options):
- for dist, remote_version_raw, remote_version_parsed in self.find_packages_latests_versions(options):
+ for dist, remote_version_raw, remote_version_parsed in \
+ self.find_packages_latests_versions(options):
if remote_version_parsed > dist.parsed_version:
- logger.notify('%s (Current: %s Latest: %s)' % (dist.project_name,
- dist.version, remote_version_raw))
+ logger.notify(
+ '%s (Current: %s Latest: %s)' %
+ (dist.project_name, dist.version, remote_version_raw)
+ )
def find_packages_latests_versions(self, options):
index_urls = [options.index_url] + options.extra_index_urls
@@ -92,20 +100,25 @@ class ListCommand(Command):
index_urls = []
if options.use_mirrors:
- logger.deprecated("1.7",
- "--use-mirrors has been deprecated and will be removed"
- " in the future. Explicit uses of --index-url and/or "
- "--extra-index-url is suggested.")
+ logger.deprecated(
+ "1.7",
+ "--use-mirrors has been deprecated and will be removed"
+ " in the future. Explicit uses of --index-url and/or "
+ "--extra-index-url is suggested."
+ )
if options.mirrors:
- logger.deprecated("1.7",
- "--mirrors has been deprecated and will be removed in "
- " the future. Explicit uses of --index-url and/or "
- "--extra-index-url is suggested.")
+ logger.deprecated(
+ "1.7",
+ "--mirrors has been deprecated and will be removed in "
+ " the future. Explicit uses of --index-url and/or "
+ "--extra-index-url is suggested."
+ )
index_urls += options.mirrors
dependency_links = []
- for dist in get_installed_distributions(local_only=options.local, skip=self.skip):
+ for dist in get_installed_distributions(
+ local_only=options.local, skip=self.skip):
if dist.has_metadata('dependency_links.txt'):
dependency_links.extend(
dist.get_metadata_lines('dependency_links.txt'),
@@ -116,7 +129,11 @@ class ListCommand(Command):
finder = self._build_package_finder(options, index_urls, session)
finder.add_dependency_links(dependency_links)
- installed_packages = get_installed_distributions(local_only=options.local, include_editables=False, skip=self.skip)
+ installed_packages = get_installed_distributions(
+ local_only=options.local,
+ include_editables=False,
+ skip=self.skip,
+ )
for dist in installed_packages:
req = InstallRequirement.from_line(dist.key, None)
try:
@@ -130,33 +147,49 @@ class ListCommand(Command):
except BestVersionAlreadyInstalled:
remote_version = req.installed_version
else:
- # It might be a good idea that link or finder had a public method
- # that returned version
- remote_version = finder._link_package_versions(link, req.name)[0]
+ # It might be a good idea that link or finder had a public
+ # method that returned version
+ remote_version = finder._link_package_versions(
+ link, req.name
+ )[0]
remote_version_raw = remote_version[2]
remote_version_parsed = remote_version[0]
yield dist, remote_version_raw, remote_version_parsed
def run_listing(self, options):
- installed_packages = get_installed_distributions(local_only=options.local, skip=self.skip)
+ installed_packages = get_installed_distributions(
+ local_only=options.local,
+ skip=self.skip,
+ )
self.output_package_listing(installed_packages)
def run_editables(self, options):
- installed_packages = get_installed_distributions(local_only=options.local, editables_only=True)
+ installed_packages = get_installed_distributions(
+ local_only=options.local,
+ editables_only=True,
+ )
self.output_package_listing(installed_packages)
def output_package_listing(self, installed_packages):
- installed_packages = sorted(installed_packages, key=lambda dist: dist.project_name.lower())
+ installed_packages = sorted(
+ installed_packages,
+ key=lambda dist: dist.project_name.lower(),
+ )
for dist in installed_packages:
if dist_is_editable(dist):
- line = '%s (%s, %s)' % (dist.project_name, dist.version, dist.location)
+ line = '%s (%s, %s)' % (
+ dist.project_name,
+ dist.version,
+ dist.location,
+ )
else:
line = '%s (%s)' % (dist.project_name, dist.version)
logger.notify(line)
def run_uptodate(self, options):
uptodate = []
- for dist, remote_version_raw, remote_version_parsed in self.find_packages_latests_versions(options):
+ for dist, remote_version_raw, remote_version_parsed in \
+ self.find_packages_latests_versions(options):
if dist.parsed_version == remote_version_parsed:
uptodate.append(dist)
self.output_package_listing(uptodate)
diff --git a/pip/commands/search.py b/pip/commands/search.py
index 94c4d12ea..2aec52760 100644
--- a/pip/commands/search.py
+++ b/pip/commands/search.py
@@ -1,8 +1,6 @@
import sys
import textwrap
-import pip.download
-
from pip.basecommand import Command, SUCCESS
from pip.util import get_terminal_size
from pip.log import logger
@@ -71,7 +69,12 @@ def transform_hits(hits):
score = 0
if name not in packages.keys():
- packages[name] = {'name': name, 'summary': summary, 'versions': [version], 'score': score}
+ packages[name] = {
+ 'name': name,
+ 'summary': summary,
+ 'versions': [version],
+ 'score': score,
+ }
else:
packages[name]['versions'].append(version)
@@ -80,8 +83,13 @@ def transform_hits(hits):
packages[name]['summary'] = summary
packages[name]['score'] = score
- # each record has a unique name now, so we will convert the dict into a list sorted by score
- package_list = sorted(packages.values(), key=lambda x: x['score'], reverse=True)
+ # each record has a unique name now, so we will convert the dict into a
+ # list sorted by score
+ package_list = sorted(
+ packages.values(),
+ key=lambda x: x['score'],
+ reverse=True,
+ )
return package_list
@@ -92,7 +100,10 @@ def print_results(hits, name_column_width=25, terminal_width=None):
summary = hit['summary'] or ''
if terminal_width is not None:
# wrap and indent summary to fit terminal
- summary = textwrap.wrap(summary, terminal_width - name_column_width - 5)
+ summary = textwrap.wrap(
+ summary,
+ terminal_width - name_column_width - 5,
+ )
summary = ('\n' + ' ' * (name_column_width + 3)).join(summary)
line = '%s - %s' % (name.ljust(name_column_width), summary)
try:
@@ -129,4 +140,7 @@ def compare_versions(version1, version2):
def highest_version(versions):
- return reduce((lambda v1, v2: compare_versions(v1, v2) == 1 and v1 or v2), versions)
+ return reduce(
+ (lambda v1, v2: compare_versions(v1, v2) == 1 and v1 or v2),
+ versions,
+ )
diff --git a/pip/commands/show.py b/pip/commands/show.py
index 02b473a97..ff89e2570 100644
--- a/pip/commands/show.py
+++ b/pip/commands/show.py
@@ -53,9 +53,10 @@ def search_packages_info(query):
'requires': [dep.project_name for dep in dist.requires()],
}
filelist = os.path.join(
- dist.location,
- dist.egg_name() + '.egg-info',
- 'installed-files.txt')
+ dist.location,
+ dist.egg_name() + '.egg-info',
+ 'installed-files.txt',
+ )
if os.path.isfile(filelist):
package['files'] = filelist
yield package
diff --git a/pip/commands/uninstall.py b/pip/commands/uninstall.py
index b7099cf8c..3d81a307e 100644
--- a/pip/commands/uninstall.py
+++ b/pip/commands/uninstall.py
@@ -27,8 +27,9 @@ class UninstallCommand(Command):
action='append',
default=[],
metavar='file',
- help='Uninstall all the packages listed in the given requirements file. '
- 'This option can be used multiple times.')
+ help='Uninstall all the packages listed in the given requirements '
+ 'file. This option can be used multiple times.',
+ )
self.cmd_opts.add_option(
'-y', '--yes',
dest='yes',
@@ -50,10 +51,14 @@ class UninstallCommand(Command):
requirement_set.add_requirement(
InstallRequirement.from_line(name))
for filename in options.requirements:
- for req in parse_requirements(filename,
- options=options, session=session):
+ for req in parse_requirements(
+ filename,
+ options=options,
+ session=session):
requirement_set.add_requirement(req)
if not requirement_set.has_requirements:
- raise InstallationError('You must give at least one requirement '
- 'to %(name)s (see "pip help %(name)s")' % dict(name=self.name))
+ raise InstallationError(
+ 'You must give at least one requirement to %(name)s (see "pip '
+ 'help %(name)s")' % dict(name=self.name)
+ )
requirement_set.uninstall(auto_confirm=options.yes)
diff --git a/pip/commands/wheel.py b/pip/commands/wheel.py
index 110fff417..edf3ef7ad 100644
--- a/pip/commands/wheel.py
+++ b/pip/commands/wheel.py
@@ -2,7 +2,7 @@
from __future__ import absolute_import
import os
-import sys
+
from pip.basecommand import Command
from pip.index import PackageFinder
from pip.log import logger
@@ -14,16 +14,19 @@ from pip import cmdoptions
DEFAULT_WHEEL_DIR = os.path.join(normalize_path(os.curdir), 'wheelhouse')
+
class WheelCommand(Command):
"""
Build Wheel archives for your requirements and dependencies.
- Wheel is a built-package format, and offers the advantage of not recompiling your software during every install.
- For more details, see the wheel docs: http://wheel.readthedocs.org/en/latest.
+ Wheel is a built-package format, and offers the advantage of not
+ recompiling your software during every install. For more details, see the
+ wheel docs: http://wheel.readthedocs.org/en/latest.
Requirements: setuptools>=0.8, and wheel.
- 'pip wheel' uses the bdist_wheel setuptools extension from the wheel package to build individual wheels.
+ 'pip wheel' uses the bdist_wheel setuptools extension from the wheel
+ package to build individual wheels.
"""
@@ -47,7 +50,9 @@ class WheelCommand(Command):
dest='wheel_dir',
metavar='dir',
default=DEFAULT_WHEEL_DIR,
- help="Build wheels into , where the default is '/wheelhouse'.")
+ help=("Build wheels into , where the default is "
+ "'/wheelhouse'."),
+ )
cmd_opts.add_option(cmdoptions.use_wheel.make())
cmd_opts.add_option(cmdoptions.no_use_wheel.make())
cmd_opts.add_option(
@@ -73,11 +78,16 @@ class WheelCommand(Command):
'--pre',
action='store_true',
default=False,
- help="Include pre-release and development versions. By default, pip only finds stable versions.")
+ help=("Include pre-release and development versions. By default, "
+ "pip only finds stable versions."),
+ )
cmd_opts.add_option(cmdoptions.no_clean.make())
- index_opts = cmdoptions.make_option_group(cmdoptions.index_group, self.parser)
+ index_opts = cmdoptions.make_option_group(
+ cmdoptions.index_group,
+ self.parser,
+ )
self.parser.insert_option_group(0, index_opts)
self.parser.insert_option_group(0, cmd_opts)
@@ -87,8 +97,13 @@ class WheelCommand(Command):
# confirm requirements
try:
import wheel.bdist_wheel
+ # Hack to make flake8 not complain about an unused import
+ wheel.bdist_wheel
except ImportError:
- raise CommandError("'pip wheel' requires the 'wheel' package. To fix this, run: pip install wheel")
+ raise CommandError(
+ "'pip wheel' requires the 'wheel' package. To fix this, run: "
+ "pip install wheel"
+ )
try:
import pkg_resources
@@ -111,31 +126,35 @@ class WheelCommand(Command):
index_urls = []
if options.use_mirrors:
- logger.deprecated("1.7",
- "--use-mirrors has been deprecated and will be removed"
- " in the future. Explicit uses of --index-url and/or "
- "--extra-index-url is suggested.")
+ logger.deprecated(
+ "1.7",
+ "--use-mirrors has been deprecated and will be removed"
+ " in the future. Explicit uses of --index-url and/or "
+ "--extra-index-url is suggested."
+ )
if options.mirrors:
- logger.deprecated("1.7",
- "--mirrors has been deprecated and will be removed in "
- " the future. Explicit uses of --index-url and/or "
- "--extra-index-url is suggested.")
+ logger.deprecated(
+ "1.7",
+ "--mirrors has been deprecated and will be removed in "
+                "the future. Explicit uses of --index-url and/or "
+ "--extra-index-url is suggested."
+ )
index_urls += options.mirrors
session = self._build_session(options)
- finder = PackageFinder(find_links=options.find_links,
- index_urls=index_urls,
- use_wheel=options.use_wheel,
- allow_external=options.allow_external,
- allow_unverified=options.allow_unverified,
- allow_all_external=options.allow_all_external,
- allow_all_prereleases=options.pre,
- process_dependency_links=
- options.process_dependency_links,
- session=session,
- )
+ finder = PackageFinder(
+ find_links=options.find_links,
+ index_urls=index_urls,
+ use_wheel=options.use_wheel,
+ allow_external=options.allow_external,
+ allow_unverified=options.allow_unverified,
+ allow_all_external=options.allow_all_external,
+ allow_all_prereleases=options.pre,
+ process_dependency_links=options.process_dependency_links,
+ session=session,
+ )
options.build_dir = os.path.abspath(options.build_dir)
requirement_set = RequirementSet(
@@ -157,8 +176,13 @@ class WheelCommand(Command):
InstallRequirement.from_line(name, None))
for filename in options.requirements:
- for req in parse_requirements(filename, finder=finder, options=options, session=session):
- if req.editable or (req.name is None and req.url.endswith(".whl")):
+ for req in parse_requirements(
+ filename,
+ finder=finder,
+ options=options,
+ session=session):
+ if (req.editable
+ or (req.name is None and req.url.endswith(".whl"))):
logger.notify("ignoring %s" % req.url)
continue
requirement_set.add_requirement(req)
@@ -177,9 +201,9 @@ class WheelCommand(Command):
requirement_set,
finder,
options.wheel_dir,
- build_options = options.build_options or [],
- global_options = options.global_options or []
- )
+ build_options=options.build_options or [],
+ global_options=options.global_options or [],
+ )
wb.build()
except PreviousBuildDirError:
options.no_clean = True
diff --git a/pip/commands/zip.py b/pip/commands/zip.py
index c8013594d..11134044a 100644
--- a/pip/commands/zip.py
+++ b/pip/commands/zip.py
@@ -36,7 +36,10 @@ class ZipCommand(Command):
'--no-pyc',
action='store_true',
dest='no_pyc',
- help='Do not include .pyc files in zip files (useful on Google App Engine).')
+ help=(
+ 'Do not include .pyc files in zip files (useful on Google App '
+ 'Engine).'),
+ )
self.cmd_opts.add_option(
'-l', '--list',
action='store_true',
@@ -46,12 +49,16 @@ class ZipCommand(Command):
'--sort-files',
action='store_true',
dest='sort_files',
- help='With --list, sort packages according to how many files they contain.')
+ help=('With --list, sort packages according to how many files they'
+ ' contain.'),
+ )
self.cmd_opts.add_option(
'--path',
action='append',
dest='paths',
- help='Restrict operations to the given paths (may include wildcards).')
+ help=('Restrict operations to the given paths (may include '
+ 'wildcards).'),
+ )
self.cmd_opts.add_option(
'-n', '--simulate',
action='store_true',
@@ -91,7 +98,11 @@ class ZipCommand(Command):
def run(self, options, args):
- logger.deprecated('1.7', "DEPRECATION: 'pip zip' and 'pip unzip` are deprecated, and will be removed in a future release.")
+ logger.deprecated(
+ '1.7',
+            "DEPRECATION: 'pip zip' and 'pip unzip' are deprecated, and will "
+ "be removed in a future release."
+ )
self.select_paths = options.paths
self.simulate = options.simulate
@@ -105,24 +116,29 @@ class ZipCommand(Command):
module_name, filename = self.find_package(arg)
if options.unzip and os.path.isdir(filename):
raise InstallationError(
- 'The module %s (in %s) is not a zip file; cannot be unzipped'
- % (module_name, filename))
+ 'The module %s (in %s) is not a zip file; cannot be '
+ 'unzipped' % (module_name, filename)
+ )
elif not options.unzip and not os.path.isdir(filename):
raise InstallationError(
- 'The module %s (in %s) is not a directory; cannot be zipped'
- % (module_name, filename))
+ 'The module %s (in %s) is not a directory; cannot be '
+ 'zipped' % (module_name, filename)
+ )
packages.append((module_name, filename))
last_status = None
for module_name, filename in packages:
if options.unzip:
last_status = self.unzip_package(module_name, filename)
else:
- last_status = self.zip_package(module_name, filename, options.no_pyc)
+ last_status = self.zip_package(
+ module_name, filename, options.no_pyc
+ )
return last_status
def unzip_package(self, module_name, filename):
zip_filename = os.path.dirname(filename)
- if not os.path.isfile(zip_filename) and zipfile.is_zipfile(zip_filename):
+ if (not os.path.isfile(zip_filename)
+ and zipfile.is_zipfile(zip_filename)):
raise InstallationError(
'Module %s (in %s) isn\'t located in a zip file in %s'
% (module_name, filename, zip_filename))
@@ -132,9 +148,13 @@ class ZipCommand(Command):
'Unpacking %s into %s, but %s is not on sys.path'
% (display_path(zip_filename), display_path(package_path),
display_path(package_path)))
- logger.notify('Unzipping %s (in %s)' % (module_name, display_path(zip_filename)))
+ logger.notify(
+ 'Unzipping %s (in %s)' % (module_name, display_path(zip_filename))
+ )
if self.simulate:
- logger.notify('Skipping remaining operations because of --simulate')
+ logger.notify(
+ 'Skipping remaining operations because of --simulate'
+ )
return
logger.indent += 2
try:
@@ -159,11 +179,17 @@ class ZipCommand(Command):
to_save.append((name, zip.read(name)))
zip.close()
if not to_save:
- logger.info('Removing now-empty zip file %s' % display_path(zip_filename))
+ logger.info(
+ 'Removing now-empty zip file %s' %
+ display_path(zip_filename)
+ )
os.unlink(zip_filename)
self.remove_filename_from_pth(zip_filename)
else:
- logger.info('Removing entries in %s/ from zip file %s' % (module_name, display_path(zip_filename)))
+ logger.info(
+ 'Removing entries in %s/ from zip file %s' %
+ (module_name, display_path(zip_filename))
+ )
zip = zipfile.ZipFile(zip_filename, 'w')
for name, content in to_save:
zip.writestr(name, content)
@@ -183,11 +209,15 @@ class ZipCommand(Command):
## FIXME: I think this needs to be undoable:
if filename == dest_filename:
filename = backup_dir(orig_filename)
- logger.notify('Moving %s aside to %s' % (orig_filename, filename))
+ logger.notify(
+ 'Moving %s aside to %s' % (orig_filename, filename)
+ )
if not self.simulate:
shutil.move(orig_filename, filename)
try:
- logger.info('Creating zip file in %s' % display_path(dest_filename))
+ logger.info(
+ 'Creating zip file in %s' % display_path(dest_filename)
+ )
if not self.simulate:
zip = zipfile.ZipFile(dest_filename, 'w')
zip.writestr(module_name + '/', '')
@@ -195,16 +225,25 @@ class ZipCommand(Command):
if no_pyc:
filenames = [f for f in filenames
if not f.lower().endswith('.pyc')]
- for fns, is_dir in [(dirnames, True), (filenames, False)]:
+ for fns, is_dir in [
+ (dirnames, True), (filenames, False)]:
for fn in fns:
full = os.path.join(dirpath, fn)
- dest = os.path.join(module_name, dirpath[len(filename):].lstrip(os.path.sep), fn)
+ dest = os.path.join(
+ module_name,
+ dirpath[len(filename):].lstrip(
+ os.path.sep
+ ),
+ fn,
+ )
if is_dir:
zip.writestr(dest + '/', '')
else:
zip.write(full, dest)
zip.close()
- logger.info('Removing old directory %s' % display_path(filename))
+ logger.info(
+ 'Removing old directory %s' % display_path(filename)
+ )
if not self.simulate:
rmtree(filename)
except:
@@ -226,7 +265,9 @@ class ZipCommand(Command):
logger.info('Removing reference to %s from .pth file %s'
% (display_path(filename), display_path(pth)))
if not [line for line in new_lines if line]:
- logger.info('%s file would be empty: deleting' % display_path(pth))
+ logger.info(
+ '%s file would be empty: deleting' % display_path(pth)
+ )
if not self.simulate:
os.unlink(pth)
else:
@@ -235,13 +276,19 @@ class ZipCommand(Command):
f.writelines(new_lines)
f.close()
return
- logger.warn('Cannot find a reference to %s in any .pth file' % display_path(filename))
+ logger.warn(
+ 'Cannot find a reference to %s in any .pth file' %
+ display_path(filename)
+ )
def add_filename_to_pth(self, filename):
path = os.path.dirname(filename)
dest = filename + '.pth'
if path not in self.paths():
- logger.warn('Adding .pth file %s, but it is not on sys.path' % display_path(dest))
+ logger.warn(
+ 'Adding .pth file %s, but it is not on sys.path' %
+ display_path(dest)
+ )
if not self.simulate:
if os.path.exists(dest):
f = open(dest)
@@ -295,8 +342,9 @@ class ZipCommand(Command):
if os.path.dirname(path) not in self.paths():
logger.notify('Zipped egg: %s' % display_path(path))
continue
- if (basename != 'site-packages' and basename != 'dist-packages'
- and not path.replace('\\', '/').endswith('lib/python')):
+ if (basename != 'site-packages'
+ and basename != 'dist-packages'
+ and not path.replace('\\', '/').endswith('lib/python')):
continue
logger.notify('In %s:' % display_path(path))
logger.indent += 2
@@ -308,7 +356,10 @@ class ZipCommand(Command):
if ext in ('.pth', '.egg-info', '.egg-link'):
continue
if ext == '.py':
- logger.info('Not displaying %s: not a package' % display_path(filename))
+ logger.info(
+ 'Not displaying %s: not a package' %
+ display_path(filename)
+ )
continue
full = os.path.join(path, filename)
if os.path.isdir(full):
@@ -316,7 +367,9 @@ class ZipCommand(Command):
elif zipfile.is_zipfile(full):
zipped.append(filename)
else:
- logger.info('Unknown file: %s' % display_path(filename))
+ logger.info(
+ 'Unknown file: %s' % display_path(filename)
+ )
if zipped:
logger.notify('Zipped packages:')
logger.indent += 2
diff --git a/pip/download.py b/pip/download.py
index 8802ccbe3..a7fd71c73 100644
--- a/pip/download.py
+++ b/pip/download.py
@@ -181,8 +181,10 @@ class LocalFSAdapter(BaseAdapter):
# We only work for requests with a host of localhost
if parsed_url.netloc.lower() != "localhost":
- raise InvalidURL("Invalid URL %r: Only localhost is allowed" %
- request.url)
+ raise InvalidURL(
+ "Invalid URL %r: Only localhost is allowed" %
+ request.url
+ )
real_url = urlparse.urlunparse(parsed_url[:1] + ("",) + parsed_url[2:])
pathname = url_to_path(real_url)
@@ -247,7 +249,7 @@ def get_file_content(url, comes_from=None, session=None):
if match:
scheme = match.group(1).lower()
if (scheme == 'file' and comes_from
- and comes_from.startswith('http')):
+ and comes_from.startswith('http')):
raise InstallationError(
'Requirements file %s references URL %s, which is local'
% (comes_from, url))
@@ -274,7 +276,9 @@ def get_file_content(url, comes_from=None, session=None):
f = open(url)
content = f.read()
except IOError as exc:
- raise InstallationError('Could not open requirements file: %s' % str(exc))
+ raise InstallationError(
+ 'Could not open requirements file: %s' % str(exc)
+ )
else:
f.close()
return url, content
@@ -327,8 +331,9 @@ def path_to_url(path):
def is_archive_file(name):
"""Return True if `name` is a considered as an archive file."""
- archives = ('.zip', '.tar.gz', '.tar.bz2', '.tgz', '.tar', '.pybundle',
- '.whl')
+ archives = (
+ '.zip', '.tar.gz', '.tar.bz2', '.tgz', '.tar', '.pybundle', '.whl'
+ )
ext = splitext(name)[1].lower()
if ext in archives:
return True
@@ -372,20 +377,29 @@ def is_file_url(link):
def _check_hash(download_hash, link):
if download_hash.digest_size != hashlib.new(link.hash_name).digest_size:
- logger.fatal("Hash digest size of the package %d (%s) doesn't match the expected hash name %s!"
- % (download_hash.digest_size, link, link.hash_name))
+ logger.fatal(
+ "Hash digest size of the package %d (%s) doesn't match the "
+ "expected hash name %s!" %
+ (download_hash.digest_size, link, link.hash_name)
+ )
raise HashMismatch('Hash name mismatch for package %s' % link)
if download_hash.hexdigest() != link.hash:
- logger.fatal("Hash of the package %s (%s) doesn't match the expected hash %s!"
- % (link, download_hash.hexdigest(), link.hash))
- raise HashMismatch('Bad %s hash for package %s' % (link.hash_name, link))
+ logger.fatal(
+ "Hash of the package %s (%s) doesn't match the expected hash %s!" %
+ (link, download_hash.hexdigest(), link.hash)
+ )
+ raise HashMismatch(
+ 'Bad %s hash for package %s' % (link.hash_name, link)
+ )
def _get_hash_from_file(target_file, link):
try:
download_hash = hashlib.new(link.hash_name)
except (ValueError, TypeError):
- logger.warn("Unsupported hash name %s for package %s" % (link.hash_name, link))
+ logger.warn(
+ "Unsupported hash name %s for package %s" % (link.hash_name, link)
+ )
return None
fp = open(target_file, 'rb')
@@ -405,7 +419,10 @@ def _download_url(resp, link, temp_location):
try:
download_hash = hashlib.new(link.hash_name)
except ValueError:
- logger.warn("Unsupported hash name %s for package %s" % (link.hash_name, link))
+ logger.warn(
+ "Unsupported hash name %s for package %s" %
+ (link.hash_name, link)
+ )
try:
total_length = int(resp.headers['content-length'])
except (ValueError, KeyError, TypeError):
@@ -417,9 +434,14 @@ def _download_url(resp, link, temp_location):
if show_progress:
## FIXME: the URL can get really long in this message:
if total_length:
- logger.start_progress('Downloading %s (%s): ' % (show_url, format_size(total_length)))
+ logger.start_progress(
+ 'Downloading %s (%s): ' %
+ (show_url, format_size(total_length))
+ )
else:
- logger.start_progress('Downloading %s (unknown size): ' % show_url)
+ logger.start_progress(
+ 'Downloading %s (unknown size): ' % show_url
+ )
else:
logger.notify('Downloading %s' % show_url)
logger.info('Downloading from URL %s' % link)
@@ -447,7 +469,13 @@ def _download_url(resp, link, temp_location):
if not total_length:
logger.show_progress('%s' % format_size(downloaded))
else:
- logger.show_progress('%3i%% %s' % (100 * downloaded / total_length, format_size(downloaded)))
+ logger.show_progress(
+ '%3i%% %s' %
+ (
+ 100 * downloaded / total_length,
+ format_size(downloaded)
+ )
+ )
if download_hash is not None:
download_hash.update(chunk)
fp.write(chunk)
@@ -472,8 +500,10 @@ def _copy_file(filename, location, content_type, link):
os.remove(download_location)
elif response == 'b':
dest_file = backup_dir(download_location)
- logger.warn('Backing up %s to %s'
- % (display_path(download_location), display_path(dest_file)))
+ logger.warn(
+ 'Backing up %s to %s' %
+ (display_path(download_location), display_path(dest_file))
+ )
shutil.move(download_location, dest_file)
if copy:
shutil.copy(filename, download_location)
@@ -495,8 +525,10 @@ def unpack_http_url(link, location, download_cache, download_dir=None,
cache_content_type_file = None
download_hash = None
if download_cache:
- cache_file = os.path.join(download_cache,
- urllib.quote(target_url, ''))
+ cache_file = os.path.join(
+ download_cache,
+ urllib.quote(target_url, '')
+ )
cache_content_type_file = cache_file + '.content-type'
already_cached = (
os.path.exists(cache_file) and
@@ -528,7 +560,8 @@ def unpack_http_url(link, location, download_cache, download_dir=None,
os.unlink(already_downloaded)
already_downloaded = None
- # We have a cached file, and we haven't already found a good downloaded copy
+ # We have a cached file, and we haven't already found a good downloaded
+ # copy
if already_cached and not temp_location:
with open(cache_content_type_file) as fp:
content_type = fp.read().strip()
diff --git a/pip/index.py b/pip/index.py
index 46916c19c..e42b4a7c4 100644
--- a/pip/index.py
+++ b/pip/index.py
@@ -8,8 +8,10 @@ import posixpath
from pip.log import logger
from pip.util import Inf, normalize_name, splitext, is_prerelease
-from pip.exceptions import (DistributionNotFound, BestVersionAlreadyInstalled,
- InstallationError, InvalidWheelFilename, UnsupportedWheel)
+from pip.exceptions import (
+ DistributionNotFound, BestVersionAlreadyInstalled, InvalidWheelFilename,
+ UnsupportedWheel,
+)
from pip.backwardcompat import urlparse, url2pathname
from pip.download import PipSession, url_to_path, path_to_url
from pip.wheel import Wheel, wheel_ext
@@ -36,9 +38,9 @@ class PackageFinder(object):
"""
def __init__(self, find_links, index_urls,
- use_wheel=True, allow_external=[], allow_unverified=[],
- allow_all_external=False, allow_all_prereleases=False,
- process_dependency_links=False, session=None):
+ use_wheel=True, allow_external=[], allow_unverified=[],
+ allow_all_external=False, allow_all_prereleases=False,
+ process_dependency_links=False, session=None):
self.find_links = find_links
self.index_urls = index_urls
self.dependency_links = []
@@ -154,11 +156,14 @@ class PackageFinder(object):
if link == INSTALLED_VERSION:
pri = 1
elif link.ext == wheel_ext:
- wheel = Wheel(link.filename) # can raise InvalidWheelFilename
+ wheel = Wheel(link.filename) # can raise InvalidWheelFilename
if not wheel.supported():
- raise UnsupportedWheel("%s is not a supported wheel for this platform. It can't be sorted." % wheel.filename)
+ raise UnsupportedWheel(
+ "%s is not a supported wheel for this platform. It "
+ "can't be sorted." % wheel.filename
+ )
pri = -(wheel.support_index_min())
- else: # sdist
+ else: # sdist
pri = -(support_num)
return (parsed_version, pri)
else:
@@ -166,11 +171,15 @@ class PackageFinder(object):
def _sort_versions(self, applicable_versions):
"""
- Bring the latest version (and wheels) to the front, but maintain the existing ordering as secondary.
- See the docstring for `_link_sort_key` for details.
- This function is isolated for easier unit testing.
+ Bring the latest version (and wheels) to the front, but maintain the
+ existing ordering as secondary. See the docstring for `_link_sort_key`
+ for details. This function is isolated for easier unit testing.
"""
- return sorted(applicable_versions, key=self._link_sort_key, reverse=True)
+ return sorted(
+ applicable_versions,
+ key=self._link_sort_key,
+ reverse=True
+ )
def find_requirement(self, req, upgrade):
@@ -179,7 +188,8 @@ class PackageFinder(object):
# For maximum compatibility with easy_install, ensure the path
# ends in a trailing slash. Although this isn't in the spec
# (and PyPI can handle it without the slash) some other index
- # implementations might break if they relied on easy_install's behavior.
+ # implementations might break if they relied on easy_install's
+ # behavior.
if not loc.endswith('/'):
loc = loc + '/'
return loc
@@ -189,11 +199,18 @@ class PackageFinder(object):
main_index_url = None
if self.index_urls:
# Check that we have the url_name correctly spelled:
- main_index_url = Link(mkurl_pypi_url(self.index_urls[0]), trusted=True)
- # This will also cache the page, so it's okay that we get it again later:
+ main_index_url = Link(
+ mkurl_pypi_url(self.index_urls[0]),
+ trusted=True,
+ )
+ # This will also cache the page, so it's okay that we get it again
+ # later:
page = self._get_page(main_index_url, req)
if page is None:
- url_name = self._find_url_name(Link(self.index_urls[0], trusted=True), url_name, req) or req.url_name
+ url_name = self._find_url_name(
+ Link(self.index_urls[0], trusted=True),
+ url_name, req
+ ) or req.url_name
if url_name is not None:
locations = [
@@ -233,8 +250,12 @@ class PackageFinder(object):
"Consider using %s if %s has it available" %
ctx)
elif len(secure_schemes) > 1:
- ctx = (location, parsed.scheme, ", ".join(secure_schemes),
- parsed.netloc)
+ ctx = (
+ location,
+ parsed.scheme,
+ ", ".join(secure_schemes),
+ parsed.netloc,
+ )
logger.warn("%s uses an insecure transport scheme (%s). "
"Consider using one of %s if %s has any of "
"them available" % ctx)
@@ -247,23 +268,43 @@ class PackageFinder(object):
found_versions.extend(
self._package_versions(
# We trust every directly linked archive in find_links
- [Link(url, '-f', trusted=True) for url in self.find_links], req.name.lower()))
+ [Link(url, '-f', trusted=True) for url in self.find_links],
+ req.name.lower()
+ )
+ )
page_versions = []
for page in self._get_pages(locations, req):
logger.debug('Analyzing links from page %s' % page.url)
logger.indent += 2
try:
- page_versions.extend(self._package_versions(page.links, req.name.lower()))
+ page_versions.extend(
+ self._package_versions(page.links, req.name.lower())
+ )
finally:
logger.indent -= 2
dependency_versions = list(self._package_versions(
[Link(url) for url in self.dependency_links], req.name.lower()))
if dependency_versions:
- logger.info('dependency_links found: %s' % ', '.join([link.url for parsed, link, version in dependency_versions]))
- file_versions = list(self._package_versions(
- [Link(url) for url in file_locations], req.name.lower()))
- if not found_versions and not page_versions and not dependency_versions and not file_versions:
- logger.fatal('Could not find any downloads that satisfy the requirement %s' % req)
+ logger.info(
+ 'dependency_links found: %s' %
+ ', '.join([
+ link.url for parsed, link, version in dependency_versions
+ ])
+ )
+ file_versions = list(
+ self._package_versions(
+ [Link(url) for url in file_locations],
+ req.name.lower()
+ )
+ )
+ if (not found_versions
+ and not page_versions
+ and not dependency_versions
+ and not file_versions):
+ logger.fatal(
+ 'Could not find any downloads that satisfy the requirement'
+ ' %s' % req
+ )
if self.need_warn_external:
logger.warn("Some externally hosted files were ignored (use "
@@ -274,41 +315,82 @@ class PackageFinder(object):
" (use --allow-unverified %s to allow)." %
req.name)
- raise DistributionNotFound('No distributions at all found for %s' % req)
+ raise DistributionNotFound(
+ 'No distributions at all found for %s' % req
+ )
installed_version = []
if req.satisfied_by is not None:
- installed_version = [(req.satisfied_by.parsed_version, INSTALLED_VERSION, req.satisfied_by.version)]
+ installed_version = [(
+ req.satisfied_by.parsed_version,
+ INSTALLED_VERSION,
+ req.satisfied_by.version,
+ )]
if file_versions:
file_versions.sort(reverse=True)
- logger.info('Local files found: %s' % ', '.join([url_to_path(link.url) for parsed, link, version in file_versions]))
+ logger.info(
+ 'Local files found: %s' %
+ ', '.join([
+ url_to_path(link.url)
+ for parsed, link, version in file_versions
+ ])
+ )
#this is an intentional priority ordering
- all_versions = installed_version + file_versions + found_versions + page_versions + dependency_versions
+ all_versions = installed_version + file_versions + found_versions \
+ + page_versions + dependency_versions
applicable_versions = []
for (parsed_version, link, version) in all_versions:
if version not in req.req:
- logger.info("Ignoring link %s, version %s doesn't match %s"
- % (link, version, ','.join([''.join(s) for s in req.req.specs])))
+ logger.info(
+ "Ignoring link %s, version %s doesn't match %s" %
+ (
+ link,
+ version,
+ ','.join([''.join(s) for s in req.req.specs])
+ )
+ )
continue
- elif is_prerelease(version) and not (self.allow_all_prereleases or req.prereleases):
+ elif (is_prerelease(version)
+ and not (self.allow_all_prereleases or req.prereleases)):
# If this version isn't the already installed one, then
# ignore it if it's a pre-release.
if link is not INSTALLED_VERSION:
- logger.info("Ignoring link %s, version %s is a pre-release (use --pre to allow)." % (link, version))
+ logger.info(
+ "Ignoring link %s, version %s is a pre-release (use "
+ "--pre to allow)." % (link, version)
+ )
continue
applicable_versions.append((parsed_version, link, version))
applicable_versions = self._sort_versions(applicable_versions)
- existing_applicable = bool([link for parsed_version, link, version in applicable_versions if link is INSTALLED_VERSION])
+ existing_applicable = bool([
+ link
+ for parsed_version, link, version in applicable_versions
+ if link is INSTALLED_VERSION
+ ])
if not upgrade and existing_applicable:
if applicable_versions[0][1] is INSTALLED_VERSION:
- logger.info('Existing installed version (%s) is most up-to-date and satisfies requirement'
- % req.satisfied_by.version)
+ logger.info(
+ 'Existing installed version (%s) is most up-to-date and '
+ 'satisfies requirement' % req.satisfied_by.version
+ )
else:
- logger.info('Existing installed version (%s) satisfies requirement (most up-to-date version is %s)'
- % (req.satisfied_by.version, applicable_versions[0][2]))
+ logger.info(
+ 'Existing installed version (%s) satisfies requirement '
+ '(most up-to-date version is %s)' %
+ (req.satisfied_by.version, applicable_versions[0][2])
+ )
return None
if not applicable_versions:
- logger.fatal('Could not find a version that satisfies the requirement %s (from versions: %s)'
- % (req, ', '.join([version for parsed_version, link, version in all_versions])))
+ logger.fatal(
+ 'Could not find a version that satisfies the requirement %s '
+ '(from versions: %s)' %
+ (
+ req,
+ ', '.join([
+ version
+ for parsed_version, link, version in all_versions
+ ])
+ )
+ )
if self.need_warn_external:
logger.warn("Some externally hosted files were ignored (use "
@@ -319,15 +401,31 @@ class PackageFinder(object):
" (use --allow-unverified %s to allow)." %
req.name)
- raise DistributionNotFound('No distributions matching the version for %s' % req)
+ raise DistributionNotFound(
+ 'No distributions matching the version for %s' % req
+ )
if applicable_versions[0][1] is INSTALLED_VERSION:
# We have an existing version, and its the best version
- logger.info('Installed version (%s) is most up-to-date (past versions: %s)'
- % (req.satisfied_by.version, ', '.join([version for parsed_version, link, version in applicable_versions[1:]]) or 'none'))
+ logger.info(
+ 'Installed version (%s) is most up-to-date (past versions: '
+ '%s)' % (
+ req.satisfied_by.version,
+ ', '.join([
+ version for parsed_version, link, version
+ in applicable_versions[1:]
+ ]) or 'none'))
raise BestVersionAlreadyInstalled
if len(applicable_versions) > 1:
- logger.info('Using version %s (newest of versions: %s)' %
- (applicable_versions[0][2], ', '.join([version for parsed_version, link, version in applicable_versions])))
+ logger.info(
+ 'Using version %s (newest of versions: %s)' %
+ (
+ applicable_versions[0][2],
+ ', '.join([
+ version for parsed_version, link, version
+ in applicable_versions
+ ])
+ )
+ )
selected_version = applicable_versions[0][1]
@@ -350,10 +448,12 @@ class PackageFinder(object):
return selected_version
-
def _find_url_name(self, index_url, url_name, req):
- """Finds the true URL name of a package, when the given name isn't quite correct.
- This is usually used to implement case-insensitivity."""
+ """
+ Finds the true URL name of a package, when the given name isn't quite
+ correct.
+ This is usually used to implement case-insensitivity.
+ """
if not index_url.url.endswith('/'):
# Vaguely part of the PyPI API... weird but true.
## FIXME: bad to modify this?
@@ -366,7 +466,9 @@ class PackageFinder(object):
for link in page.links:
base = posixpath.basename(link.path.rstrip('/'))
if norm_name == normalize_name(base):
- logger.notify('Real name of requirement %s is %s' % (url_name, base))
+ logger.notify(
+ 'Real name of requirement %s is %s' % (url_name, base)
+ )
return base
return None
@@ -403,9 +505,11 @@ class PackageFinder(object):
if (link.trusted is not None
and not link.trusted
and not normalized in self.allow_unverified):
- logger.debug("Not searching %s for urls, it is an "
- "untrusted link and cannot produce safe or "
- "verifiable files." % link)
+ logger.debug(
+ "Not searching %s for urls, it is an "
+ "untrusted link and cannot produce safe or "
+ "verifiable files." % link
+ )
self.need_warn_unverified = True
continue
@@ -416,7 +520,10 @@ class PackageFinder(object):
_py_version_re = re.compile(r'-py([123]\.?[0-9]?)$')
def _sort_links(self, links):
- "Returns elements of links in order, non-egg links first, egg links second, while eliminating duplicates"
+ """
+ Returns elements of links in order, non-egg links first, egg links
+ second, while eliminating duplicates
+ """
eggs, no_eggs = [], []
seen = set()
for link in links:
@@ -465,7 +572,10 @@ class PackageFinder(object):
ext = '.tar' + ext
if ext not in self._known_extensions():
if link not in self.logged_links:
- logger.debug('Skipping link %s; unknown archive format: %s' % (link, ext))
+ logger.debug(
+ 'Skipping link %s; unknown archive format: %s' %
+ (link, ext)
+ )
self.logged_links.add(link)
return []
if "macosx10" in link.path and ext == '.zip':
@@ -477,28 +587,39 @@ class PackageFinder(object):
try:
wheel = Wheel(link.filename)
except InvalidWheelFilename:
- logger.debug('Skipping %s because the wheel filename is invalid' % link)
+ logger.debug(
+ 'Skipping %s because the wheel filename is invalid' %
+ link
+ )
return []
if wheel.name.lower() != search_name.lower():
- logger.debug('Skipping link %s; wrong project name (not %s)' % (link, search_name))
+ logger.debug(
+ 'Skipping link %s; wrong project name (not %s)' %
+ (link, search_name)
+ )
return []
if not wheel.supported():
- logger.debug('Skipping %s because it is not compatible with this Python' % link)
+ logger.debug(
+ 'Skipping %s because it is not compatible with this '
+ 'Python' % link
+ )
return []
# This is a dirty hack to prevent installing Binary Wheels from
# PyPI unless it is a Windows or Mac Binary Wheel. This is
# paired with a change to PyPI disabling uploads for the
- # same. Once we have a mechanism for enabling support for binary
- # wheels on linux that deals with the inherent problems of
- # binary distribution this can be removed.
+ # same. Once we have a mechanism for enabling support for
+ # binary wheels on linux that deals with the inherent problems
+ # of binary distribution this can be removed.
comes_from = getattr(link, "comes_from", None)
- if ((
- not platform.startswith('win')
- and not platform.startswith('macosx')
+ if (
+ (
+ not platform.startswith('win')
+ and not platform.startswith('macosx')
)
- and comes_from is not None
- and urlparse.urlparse(comes_from.url).netloc.endswith(
- "pypi.python.org")):
+ and comes_from is not None
+ and urlparse.urlparse(
+ comes_from.url
+ ).netloc.endswith("pypi.python.org")):
if not wheel.supported(tags=supported_tags_noarch):
logger.debug(
"Skipping %s because it is a pypi-hosted binary "
@@ -510,12 +631,16 @@ class PackageFinder(object):
if not version:
version = self._egg_info_matches(egg_info, search_name, link)
if version is None:
- logger.debug('Skipping link %s; wrong project name (not %s)' % (link, search_name))
+ logger.debug(
+ 'Skipping link %s; wrong project name (not %s)' %
+ (link, search_name)
+ )
return []
if (link.internal is not None
and not link.internal
- and not normalize_name(search_name).lower() in self.allow_external
+ and not normalize_name(search_name).lower()
+ in self.allow_external
and not self.allow_all_external):
# We have a link that we are sure is external, so we should skip
# it unless we are allowing externals
@@ -526,7 +651,7 @@ class PackageFinder(object):
if (link.verifiable is not None
and not link.verifiable
and not (normalize_name(search_name).lower()
- in self.allow_unverified)):
+ in self.allow_unverified)):
# We have a link that we are sure we cannot verify it's integrity,
# so we should skip it unless we are allowing unsafe installs
# for this requirement.
@@ -540,12 +665,16 @@ class PackageFinder(object):
version = version[:match.start()]
py_version = match.group(1)
if py_version != sys.version[:3]:
- logger.debug('Skipping %s because Python version is incorrect' % link)
+ logger.debug(
+ 'Skipping %s because Python version is incorrect' % link
+ )
return []
logger.debug('Found link %s, version: %s' % (link, version))
- return [(pkg_resources.parse_version(version),
- link,
- version)]
+ return [(
+ pkg_resources.parse_version(version),
+ link,
+ version,
+ )]
def _egg_info_matches(self, egg_info, search_name, link):
match = self._egg_info_re.search(egg_info)
@@ -563,7 +692,8 @@ class PackageFinder(object):
return None
def _get_page(self, link, req):
- return HTMLPage.get_page(link, req,
+ return HTMLPage.get_page(
+ link, req,
cache=self.cache,
session=self.session,
)
@@ -605,7 +735,10 @@ class HTMLPage(object):
## FIXME: these regexes are horrible hacks:
_homepage_re = re.compile(r'\s*home\s*page', re.I)
_download_re = re.compile(r' | \s*download\s+url', re.I)
- _href_re = re.compile('href=(?:"([^"]*)"|\'([^\']*)\'|([^>\\s\\n]*))', re.I|re.S)
+ _href_re = re.compile(
+ 'href=(?:"([^"]*)"|\'([^\']*)\'|([^>\\s\\n]*))',
+ re.I | re.S
+ )
def __init__(self, content, url, headers=None, trusted=None):
self.content = content
@@ -631,7 +764,9 @@ class HTMLPage(object):
from pip.vcs import VcsSupport
for scheme in VcsSupport.schemes:
if url.lower().startswith(scheme) and url[len(scheme)] in '+:':
- logger.debug('Cannot look at %(scheme)s URL %(link)s' % locals())
+ logger.debug(
+ 'Cannot look at %(scheme)s URL %(link)s' % locals()
+ )
return None
if cache is not None:
@@ -646,22 +781,27 @@ class HTMLPage(object):
filename = link.filename
for bad_ext in ['.tar', '.tar.gz', '.tar.bz2', '.tgz', '.zip']:
if filename.endswith(bad_ext):
- content_type = cls._get_content_type(url,
- session=session,
+ content_type = cls._get_content_type(
+ url, session=session,
)
if content_type.lower().startswith('text/html'):
break
else:
- logger.debug('Skipping page %s because of Content-Type: %s' % (link, content_type))
+ logger.debug(
+ 'Skipping page %s because of Content-Type: '
+ '%s' % (link, content_type)
+ )
if cache is not None:
cache.set_is_archive(url)
return None
logger.debug('Getting page %s' % url)
# Tack index.html onto file:// URLs that point to directories
- (scheme, netloc, path, params, query, fragment) = urlparse.urlparse(url)
+ (scheme, netloc, path, params, query, fragment) = \
+ urlparse.urlparse(url)
if scheme == 'file' and os.path.isdir(url2pathname(path)):
- # add trailing slash if not present so urljoin doesn't trim final segment
+ # add trailing slash if not present so urljoin doesn't trim
+ # final segment
if not url.endswith('/'):
url += '/'
url = urlparse.urljoin(url, 'index.html')
@@ -672,15 +812,15 @@ class HTMLPage(object):
# The check for archives above only works if the url ends with
# something that looks like an archive. However that is not a
- # requirement. For instance http://sourceforge.net/projects/docutils/files/docutils/0.8.1/docutils-0.8.1.tar.gz/download
- # redirects to http://superb-dca3.dl.sourceforge.net/project/docutils/docutils/0.8.1/docutils-0.8.1.tar.gz
- # Unless we issue a HEAD request on every url we cannot know
- # ahead of time for sure if something is HTML or not. However we
- # can check after we've downloaded it.
+ # requirement of a url. Unless we issue a HEAD request on every
+ # url we cannot know ahead of time for sure if something is HTML
+ # or not. However we can check after we've downloaded it.
content_type = resp.headers.get('Content-Type', 'unknown')
if not content_type.lower().startswith("text/html"):
- logger.debug('Skipping page %s because of Content-Type: %s' %
- (link, content_type))
+ logger.debug(
+ 'Skipping page %s because of Content-Type: %s' %
+ (link, content_type)
+ )
if cache is not None:
cache.set_is_archive(url)
return None
@@ -699,7 +839,8 @@ class HTMLPage(object):
except SSLError as exc:
reason = ("There was a problem confirming the ssl certificate: "
"%s" % exc)
- cls._handle_fail(req, link, reason, url,
+ cls._handle_fail(
+ req, link, reason, url,
cache=cache,
level=2,
meth=logger.notify,
@@ -743,8 +884,10 @@ class HTMLPage(object):
if not hasattr(self, "_api_version"):
_api_version = None
- metas = [x for x in self.parsed.findall(".//meta")
- if x.get("name", "").lower() == "api-version"]
+ metas = [
+ x for x in self.parsed.findall(".//meta")
+ if x.get("name", "").lower() == "api-version"
+ ]
if metas:
try:
_api_version = int(metas[0].get("value", None))
@@ -778,8 +921,10 @@ class HTMLPage(object):
if self.api_version and self.api_version >= 2:
# Only api_versions >= 2 have a distinction between
# external and internal links
- internal = bool(anchor.get("rel")
- and "internal" in anchor.get("rel").split())
+ internal = bool(
+ anchor.get("rel")
+ and "internal" in anchor.get("rel").split()
+ )
yield Link(url, self, internal=internal)
@@ -800,7 +945,9 @@ class HTMLPage(object):
# what rels were being looked for
if found_rels & rels:
href = anchor.get("href")
- url = self.clean_link(urlparse.urljoin(self.base_url, href))
+ url = self.clean_link(
+ urlparse.urljoin(self.base_url, href)
+ )
yield Link(url, self, trusted=False)
def scraped_rel_links(self):
@@ -812,7 +959,11 @@ class HTMLPage(object):
href_match = self._href_re.search(self.content, pos=match.end())
if not href_match:
continue
- url = href_match.group(1) or href_match.group(2) or href_match.group(3)
+ url = (
+ href_match.group(1)
+ or href_match.group(2)
+ or href_match.group(3)
+ )
if not url:
continue
url = self.clean_link(urlparse.urljoin(self.base_url, url))
@@ -831,7 +982,7 @@ class HTMLPage(object):
class Link(object):
def __init__(self, url, comes_from=None, internal=None, trusted=None,
- _deprecated_regex=False):
+ _deprecated_regex=False):
self.url = url
self.comes_from = comes_from
self.internal = internal
@@ -904,7 +1055,9 @@ class Link(object):
return None
return match.group(1)
- _hash_re = re.compile(r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)')
+ _hash_re = re.compile(
+ r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)'
+ )
@property
def hash(self):
diff --git a/pip/locations.py b/pip/locations.py
index 9cb6eef11..b270349d0 100644
--- a/pip/locations.py
+++ b/pip/locations.py
@@ -9,6 +9,9 @@ import getpass
from pip.backwardcompat import get_python_lib, get_path_uid, user_site
import pip.exceptions
+# Hack for flake8
+install
+
DELETE_MARKER_MESSAGE = '''\
This file is placed here by pip to indicate the source was put
@@ -19,6 +22,7 @@ deleted (unless you remove this file).
'''
PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt'
+
def write_delete_marker_file(directory):
"""
Write the pip delete marker file into this directory.
@@ -46,12 +50,14 @@ def virtualenv_no_global():
"""
Return True if in a venv and no system site packages.
"""
- #this mirrors the logic in virtualenv.py for locating the no-global-site-packages.txt file
+ # this mirrors the logic in virtualenv.py for locating the
+ # no-global-site-packages.txt file
site_mod_dir = os.path.dirname(os.path.abspath(site.__file__))
no_global_file = os.path.join(site_mod_dir, 'no-global-site-packages.txt')
if running_under_virtualenv() and os.path.isfile(no_global_file):
return True
+
def __get_username():
""" Returns the effective username of the current process. """
if sys.platform == 'win32':
@@ -59,10 +65,13 @@ def __get_username():
import pwd
return pwd.getpwuid(os.geteuid()).pw_name
+
def _get_build_prefix():
""" Returns a safe build_prefix """
- path = os.path.join(tempfile.gettempdir(), 'pip_build_%s' %
- __get_username())
+ path = os.path.join(
+ tempfile.gettempdir(),
+ 'pip_build_%s' % __get_username()
+ )
if sys.platform == 'win32':
""" on windows(tested on 7) temp dirs are isolated """
return path
@@ -79,11 +88,15 @@ def _get_build_prefix():
file_uid = None
if file_uid != os.geteuid():
- msg = "The temporary folder for building (%s) is either not owned by you, or is a symlink." \
- % path
- print (msg)
- print("pip will not work until the temporary folder is " + \
- "either deleted or is a real directory owned by your user account.")
+ msg = (
+ "The temporary folder for building (%s) is either not owned by"
+ " you, or is a symlink." % path
+ )
+ print(msg)
+ print(
+ "pip will not work until the temporary folder is either "
+ "deleted or is a real directory owned by your user account."
+ )
raise pip.exceptions.InstallationError(msg)
return path
@@ -100,7 +113,9 @@ else:
src_prefix = os.path.join(os.getcwd(), 'src')
except OSError:
# In case the current working directory has been renamed or deleted
- sys.exit("The folder you are executing pip from can no longer be found.")
+ sys.exit(
+ "The folder you are executing pip from can no longer be found."
+ )
# under Mac OS X + virtualenv sys.prefix is not properly resolved
# it is something like /path/to/python/bin/..
@@ -121,14 +136,20 @@ if sys.platform == 'win32':
bin_user = os.path.join(user_site, 'bin') if user_site else None
default_storage_dir = os.path.join(user_dir, 'pip')
default_config_basename = 'pip.ini'
- default_config_file = os.path.join(default_storage_dir, default_config_basename)
+ default_config_file = os.path.join(
+ default_storage_dir,
+ default_config_basename,
+ )
default_log_file = os.path.join(default_storage_dir, 'pip.log')
else:
bin_py = os.path.join(sys.prefix, 'bin')
bin_user = os.path.join(user_site, 'bin') if user_site else None
default_storage_dir = os.path.join(user_dir, '.pip')
default_config_basename = 'pip.conf'
- default_config_file = os.path.join(default_storage_dir, default_config_basename)
+ default_config_file = os.path.join(
+ default_storage_dir,
+ default_config_basename,
+ )
default_log_file = os.path.join(default_storage_dir, 'pip.log')
# Forcing to use /usr/local/bin for standard Mac OS X framework installs
@@ -148,8 +169,8 @@ def distutils_scheme(dist_name, user=False, home=None, root=None):
d = Distribution({'name': dist_name})
d.parse_config_files()
i = d.get_command_obj('install', create=True)
- # NOTE: setting user or home has the side-effect of creating the home dir or
- # user base for installations during finalize_options()
+ # NOTE: setting user or home has the side-effect of creating the home dir
+ # or user base for installations during finalize_options()
# ideally, we'd prefer a scheme class that has no side-effects.
i.user = user or i.user
i.home = home or i.home
@@ -159,11 +180,13 @@ def distutils_scheme(dist_name, user=False, home=None, root=None):
scheme[key] = getattr(i, 'install_'+key)
if running_under_virtualenv():
- scheme['headers'] = os.path.join(sys.prefix,
- 'include',
- 'site',
- 'python' + sys.version[:3],
- dist_name)
+ scheme['headers'] = os.path.join(
+ sys.prefix,
+ 'include',
+ 'site',
+ 'python' + sys.version[:3],
+ dist_name,
+ )
if root is not None:
scheme["headers"] = os.path.join(
diff --git a/pip/log.py b/pip/log.py
index 4e3177383..5977f8f47 100644
--- a/pip/log.py
+++ b/pip/log.py
@@ -5,7 +5,6 @@ import sys
import os
import logging
-from pip import backwardcompat
from pip._vendor import colorama, pkg_resources
@@ -16,8 +15,10 @@ def _color_wrap(*colors):
def should_color(consumer, environ, std=(sys.stdout, sys.stderr)):
- real_consumer = (consumer if not isinstance(consumer, colorama.AnsiToWin32)
- else consumer.wrapped)
+ real_consumer = (
+ consumer if not isinstance(consumer, colorama.AnsiToWin32)
+ else consumer.wrapped
+ )
# If consumer isn't stdout or stderr we shouldn't colorize it
if real_consumer not in std:
@@ -49,7 +50,7 @@ def should_warn(current_version, removal_version):
# Test if our current_version should be a warn
return (pkg_resources.parse_version(current_version)
- < pkg_resources.parse_version(warn_version))
+ < pkg_resources.parse_version(warn_version))
class Logger(object):
@@ -147,7 +148,7 @@ class Logger(object):
for consumer_level, consumer in self.consumers:
if self.level_matches(level, consumer_level):
if (self.in_progress_hanging
- and consumer in (sys.stdout, sys.stderr)):
+ and consumer in (sys.stdout, sys.stderr)):
self.in_progress_hanging = False
sys.stdout.write('\n')
sys.stdout.flush()
@@ -191,7 +192,8 @@ class Logger(object):
sys.stdout.write('...' + self.in_progress + msg + '\n')
sys.stdout.flush()
else:
- # These erase any messages shown with show_progress (besides .'s)
+ # These erase any messages shown with show_progress
+ # (besides .'s)
logger.show_progress('')
logger.show_progress('')
sys.stdout.write(msg + '\n')
@@ -208,11 +210,16 @@ class Logger(object):
sys.stdout.flush()
else:
if self.last_message:
- padding = ' ' * max(0, len(self.last_message) - len(message))
+ padding = ' ' * max(
+ 0,
+ len(self.last_message) - len(message)
+ )
else:
padding = ''
- sys.stdout.write('\r%s%s%s%s' %
- (' ' * self.indent, self.in_progress, message, padding))
+ sys.stdout.write(
+ '\r%s%s%s%s' %
+ (' ' * self.indent, self.in_progress, message, padding)
+ )
sys.stdout.flush()
self.last_message = message
diff --git a/pip/pep425tags.py b/pip/pep425tags.py
index 95d375395..b760c922c 100644
--- a/pip/pep425tags.py
+++ b/pip/pep425tags.py
@@ -58,7 +58,7 @@ def get_supported(versions=None, noarch=False):
try:
soabi = sysconfig.get_config_var('SOABI')
- except IOError as e: # Issue #1074
+ except IOError as e: # Issue #1074
warnings.warn("{0}".format(e), RuntimeWarning)
soabi = None
diff --git a/pip/req/__init__.py b/pip/req/__init__.py
index 76f785411..1e90c40dd 100644
--- a/pip/req/__init__.py
+++ b/pip/req/__init__.py
@@ -3,5 +3,6 @@ from .req_install import InstallRequirement
from .req_set import RequirementSet, Requirements
from .req_file import parse_requirements
-__all__ = [RequirementSet, Requirements, InstallRequirement,
- parse_requirements]
+__all__ = [
+ RequirementSet, Requirements, InstallRequirement, parse_requirements,
+]
diff --git a/pip/req/req_file.py b/pip/req/req_file.py
index e2a7e9357..74c1c2772 100644
--- a/pip/req/req_file.py
+++ b/pip/req/req_file.py
@@ -19,7 +19,8 @@ def parse_requirements(filename, finder=None, comes_from=None, options=None,
if skip_regex:
skip_match = re.compile(skip_regex)
reqs_file_dir = os.path.dirname(os.path.abspath(filename))
- filename, content = get_file_content(filename,
+ filename, content = get_file_content(
+ filename,
comes_from=comes_from,
session=session,
)
@@ -44,7 +45,11 @@ def parse_requirements(filename, finder=None, comes_from=None, options=None,
req_url = urlparse.urljoin(filename, req_url)
elif not _scheme_re.search(req_url):
req_url = os.path.join(os.path.dirname(filename), req_url)
- for item in parse_requirements(req_url, finder, comes_from=filename, options=options, session=session):
+ for item in parse_requirements(
+ req_url, finder,
+ comes_from=filename,
+ options=options,
+ session=session):
yield item
elif line.startswith('-Z') or line.startswith('--always-unzip'):
# No longer used, but previously these were used in
@@ -104,7 +109,14 @@ def parse_requirements(filename, finder=None, comes_from=None, options=None,
else:
line = line[len('--editable'):].strip().lstrip('=')
req = InstallRequirement.from_editable(
- line, comes_from=comes_from, default_vcs=options.default_vcs if options else None)
+ line,
+ comes_from=comes_from,
+ default_vcs=options.default_vcs if options else None
+ )
else:
- req = InstallRequirement.from_line(line, comes_from, prereleases=getattr(options, "pre", None))
+ req = InstallRequirement.from_line(
+ line,
+ comes_from,
+ prereleases=getattr(options, "pre", None)
+ )
yield req
diff --git a/pip/req/req_install.py b/pip/req/req_install.py
index 680243607..e2f018ab5 100644
--- a/pip/req/req_install.py
+++ b/pip/req/req_install.py
@@ -9,15 +9,23 @@ from email.parser import FeedParser
import pip.wheel
from pip._vendor import pkg_resources
-from pip.backwardcompat import urllib, ConfigParser, string_types, get_python_version
+from pip.backwardcompat import (
+ urllib, ConfigParser, string_types, get_python_version,
+)
from pip.download import is_url, url_to_path, path_to_url, is_archive_file
-from pip.exceptions import InstallationError, UninstallationError, UnsupportedWheel
+from pip.exceptions import (
+ InstallationError, UninstallationError, UnsupportedWheel,
+)
from pip.index import Link
-from pip.locations import bin_py, running_under_virtualenv, PIP_DELETE_MARKER_FILENAME, bin_user
+from pip.locations import (
+ bin_py, running_under_virtualenv, PIP_DELETE_MARKER_FILENAME, bin_user,
+)
from pip.log import logger
-from pip.util import (display_path, rmtree, ask_path_exists, backup_dir, is_installable_dir,
- dist_in_usersite, dist_in_site_packages, egg_link_path, make_path_relative,
- call_subprocess, is_prerelease, read_text_file, FakeFile, _make_build_dir)
+from pip.util import (
+ display_path, rmtree, ask_path_exists, backup_dir, is_installable_dir,
+ dist_in_usersite, dist_in_site_packages, egg_link_path, make_path_relative,
+ call_subprocess, is_prerelease, read_text_file, FakeFile, _make_build_dir,
+)
from pip.req.req_uninstall import UninstallPathSet
from pip.vcs import vcs
from pip.wheel import move_wheel_files, Wheel, wheel_ext
@@ -68,7 +76,9 @@ class InstallRequirement(object):
if prereleases:
self.prereleases = True
elif self.req is not None:
- self.prereleases = any([is_prerelease(x[1]) and x[0] != "!=" for x in self.req.specs])
+ self.prereleases = any([
+ is_prerelease(x[1]) and x[0] != "!=" for x in self.req.specs
+ ])
else:
self.prereleases = False
@@ -104,20 +114,30 @@ class InstallRequirement(object):
if is_url(name):
link = Link(name)
- elif os.path.isdir(path) and (os.path.sep in name or name.startswith('.')):
+ elif (os.path.isdir(path)
+ and (os.path.sep in name or name.startswith('.'))):
if not is_installable_dir(path):
- raise InstallationError("Directory %r is not installable. File 'setup.py' not found." % name)
+ raise InstallationError(
+ "Directory %r is not installable. File 'setup.py' not "
+ "found." % name
+ )
link = Link(path_to_url(name))
elif is_archive_file(path):
if not os.path.isfile(path):
- logger.warn('Requirement %r looks like a filename, but the file does not exist', name)
+ logger.warn(
+ 'Requirement %r looks like a filename, but the file does '
+ 'not exist',
+ name
+ )
link = Link(path_to_url(name))
- # If the line has an egg= definition, but isn't editable, pull the requirement out.
- # Otherwise, assume the name is the req for the non URL/path/archive case.
+ # If the line has an egg= definition, but isn't editable, pull the
+ # requirement out. Otherwise, assume the name is the req for the non
+ # URL/path/archive case.
if link and req is None:
url = link.url_without_fragment
- req = link.egg_fragment #when fragment is None, this will become an 'unnamed' requirement
+ # when fragment is None, this will become an 'unnamed' requirement
+ req = link.egg_fragment
# Handle relative file URLs
if link.scheme == 'file' and re.search(r'\.\./', url):
@@ -125,9 +145,12 @@ class InstallRequirement(object):
# fail early for invalid or unsupported wheels
if link.ext == wheel_ext:
- wheel = Wheel(link.filename) # can raise InvalidWheelFilename
+ wheel = Wheel(link.filename) # can raise InvalidWheelFilename
if not wheel.supported():
- raise UnsupportedWheel("%s is not a supported wheel on this platform." % wheel.filename)
+ raise UnsupportedWheel(
+ "%s is not a supported wheel on this platform." %
+ wheel.filename
+ )
else:
req = name
@@ -176,7 +199,8 @@ class InstallRequirement(object):
name = self.name.lower()
else:
name = self.name
- # FIXME: Is there a better place to create the build_dir? (hg and bzr need this)
+ # FIXME: Is there a better place to create the build_dir? (hg and bzr
+ # need this)
if not os.path.exists(build_dir):
_make_build_dir(build_dir)
return os.path.join(build_dir, name)
@@ -203,8 +227,10 @@ class InstallRequirement(object):
raise InstallationError(
'A package already exists in %s; please remove it to continue'
% display_path(new_location))
- logger.debug('Moving package %s from %s to new location %s'
- % (self, display_path(old_location), display_path(new_location)))
+ logger.debug(
+ 'Moving package %s from %s to new location %s' %
+ (self, display_path(old_location), display_path(new_location))
+ )
shutil.move(old_location, new_location)
self._temp_build_dir = new_location
self.source_dir = new_location
@@ -226,6 +252,8 @@ class InstallRequirement(object):
def setup_py(self):
try:
import setuptools
+ # Small hack to make flake8 not complain about an unused import
+ setuptools
except ImportError:
# Setuptools is not available
raise InstallationError(
@@ -248,28 +276,37 @@ class InstallRequirement(object):
def run_egg_info(self, force_root_egg_info=False):
assert self.source_dir
if self.name:
- logger.notify('Running setup.py (path:%s) egg_info for package %s' % (self.setup_py, self.name))
+ logger.notify(
+ 'Running setup.py (path:%s) egg_info for package %s' %
+ (self.setup_py, self.name)
+ )
else:
- logger.notify('Running setup.py (path:%s) egg_info for package from %s' % (self.setup_py, self.url))
+ logger.notify(
+ 'Running setup.py (path:%s) egg_info for package from %s' %
+ (self.setup_py, self.url)
+ )
logger.indent += 2
try:
# if it's distribute>=0.7, it won't contain an importable
# setuptools, and having an egg-info dir blocks the ability of
- # setup.py to find setuptools plugins, so delete the egg-info dir if
- # no setuptools. it will get recreated by the run of egg_info
- # NOTE: this self.name check only works when installing from a specifier
- # (not archive path/urls)
+ # setup.py to find setuptools plugins, so delete the egg-info dir
+ # if no setuptools. it will get recreated by the run of egg_info
+ # NOTE: this self.name check only works when installing from a
+ # specifier (not archive path/urls)
# TODO: take this out later
- if self.name == 'distribute' and not os.path.isdir(os.path.join(self.source_dir, 'setuptools')):
+ if (self.name == 'distribute'
+ and not os.path.isdir(
+ os.path.join(self.source_dir, 'setuptools'))):
rmtree(os.path.join(self.source_dir, 'distribute.egg-info'))
script = self._run_setup_py
script = script.replace('__SETUP_PY__', repr(self.setup_py))
script = script.replace('__PKG_NAME__', repr(self.name))
egg_info_cmd = [sys.executable, '-c', script, 'egg_info']
- # We can't put the .egg-info files at the root, because then the source code will be mistaken
- # for an installed egg, causing problems
+ # We can't put the .egg-info files at the root, because then the
+ # source code will be mistaken for an installed egg, causing
+ # problems
if self.editable or force_root_egg_info:
egg_base_option = []
else:
@@ -279,7 +316,9 @@ class InstallRequirement(object):
egg_base_option = ['--egg-base', 'pip-egg-info']
call_subprocess(
egg_info_cmd + egg_base_option,
- cwd=self.source_dir, filter_stdout=self._filter_install, show_stdout=False,
+ cwd=self.source_dir,
+ filter_stdout=self._filter_install,
+ show_stdout=False,
command_level=logger.VERBOSE_DEBUG,
command_desc='python setup.py egg_info')
finally:
@@ -307,7 +346,11 @@ def replacement_run(self):
writer(self, ep.name, os.path.join(self.egg_info,ep.name))
self.find_sources()
egg_info.egg_info.run = replacement_run
-exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\\r\\n', '\\n'), __file__, 'exec'))
+exec(compile(
+ getattr(tokenize, 'open', open)(__file__).read().replace('\\r\\n', '\\n'),
+ __file__,
+ 'exec'
+))
"""
def egg_info_data(self, filename):
@@ -339,9 +382,17 @@ exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\\r\\n',
# a list while iterating over it can cause trouble.
# (See https://github.com/pypa/pip/pull/462.)
for dir in list(dirs):
- # Don't search in anything that looks like a virtualenv environment
- if (os.path.exists(os.path.join(root, dir, 'bin', 'python'))
- or os.path.exists(os.path.join(root, dir, 'Scripts', 'Python.exe'))):
+ # Don't search in anything that looks like a virtualenv
+ # environment
+ if (
+ os.path.exists(
+ os.path.join(root, dir, 'bin', 'python')
+ )
+ or os.path.exists(
+ os.path.join(
+ root, dir, 'Scripts', 'Python.exe'
+ )
+ )):
dirs.remove(dir)
# Also don't search through tests
if dir == 'test' or dir == 'tests':
@@ -351,16 +402,20 @@ exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\\r\\n',
filenames = [f for f in filenames if f.endswith('.egg-info')]
if not filenames:
- raise InstallationError('No files/directories in %s (from %s)' % (base, filename))
- assert filenames, "No files/directories in %s (from %s)" % (base, filename)
+ raise InstallationError(
+ 'No files/directories in %s (from %s)' % (base, filename)
+ )
+ assert filenames, \
+ "No files/directories in %s (from %s)" % (base, filename)
- # if we have more than one match, we pick the toplevel one. This can
- # easily be the case if there is a dist folder which contains an
- # extracted tarball for testing purposes.
+ # if we have more than one match, we pick the toplevel one. This
+ # can easily be the case if there is a dist folder which contains
+ # an extracted tarball for testing purposes.
if len(filenames) > 1:
- filenames.sort(key=lambda x: x.count(os.path.sep) +
- (os.path.altsep and
- x.count(os.path.altsep) or 0))
+ filenames.sort(
+ key=lambda x: x.count(os.path.sep)
+ + (os.path.altsep and x.count(os.path.altsep) or 0)
+ )
self._egg_info_path = os.path.join(base, filenames[0])
return os.path.join(self._egg_info_path, filename)
@@ -380,7 +435,10 @@ exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\\r\\n',
p = FeedParser()
data = self.egg_info_data('PKG-INFO')
if not data:
- logger.warn('No PKG-INFO file found in %s' % display_path(self.egg_info_path('PKG-INFO')))
+ logger.warn(
+ 'No PKG-INFO file found in %s' %
+ display_path(self.egg_info_path('PKG-INFO'))
+ )
p.feed(data or '')
return p.close()
@@ -417,14 +475,22 @@ exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\\r\\n',
assert self.source_dir
version = self.installed_version
if version not in self.req:
- logger.warn('Requested %s, but installing version %s' % (self, self.installed_version))
+ logger.warn(
+ 'Requested %s, but installing version %s' %
+ (self, self.installed_version)
+ )
else:
- logger.debug('Source in %s has version %s, which satisfies requirement %s'
- % (display_path(self.source_dir), version, self))
+ logger.debug(
+ 'Source in %s has version %s, which satisfies requirement %s' %
+ (display_path(self.source_dir), version, self)
+ )
def update_editable(self, obtain=True):
if not self.url:
- logger.info("Cannot update repository at %s; repository location is unknown" % self.source_dir)
+ logger.info(
+ "Cannot update repository at %s; repository location is "
+ "unknown" % self.source_dir
+ )
return
assert self.editable
assert self.source_dir
@@ -461,7 +527,9 @@ exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\\r\\n',
"""
if not self.check_if_exists():
- raise UninstallationError("Cannot uninstall requirement %s, not installed" % (self.name,))
+ raise UninstallationError(
+ "Cannot uninstall requirement %s, not installed" % (self.name,)
+ )
dist = self.satisfied_by or self.conflicts_with
paths_to_remove = UninstallPathSet(dist)
@@ -471,7 +539,7 @@ exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\\r\\n',
dist_info_path = os.path.join(dist.location,
'-'.join(dist.egg_name().split('-')[:2])
) + '.dist-info'
- # workaround for http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=618367
+ # Workaround - http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=618367
debian_egg_info_path = pip_egg_info_path.replace(
'-py%s' % pkg_resources.PY_MAJOR, '')
easy_install_egg = dist.egg_name() + '.egg'
@@ -488,19 +556,24 @@ exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\\r\\n',
egg_info_path = debian_egg_info_path
paths_to_remove.add(egg_info_path)
if dist.has_metadata('installed-files.txt'):
- for installed_file in dist.get_metadata('installed-files.txt').splitlines():
- path = os.path.normpath(os.path.join(egg_info_path, installed_file))
+ for installed_file in dist.get_metadata(
+ 'installed-files.txt').splitlines():
+ path = os.path.normpath(
+ os.path.join(egg_info_path, installed_file)
+ )
paths_to_remove.add(path)
- #FIXME: need a test for this elif block
- #occurs with --single-version-externally-managed/--record outside of pip
+ # FIXME: need a test for this elif block
+ # occurs with --single-version-externally-managed/--record outside
+ # of pip
elif dist.has_metadata('top_level.txt'):
if dist.has_metadata('namespace_packages.txt'):
namespaces = dist.get_metadata('namespace_packages.txt')
else:
namespaces = []
- for top_level_pkg in [p for p
- in dist.get_metadata('top_level.txt').splitlines()
- if p and p not in namespaces]:
+ for top_level_pkg in [
+ p for p
+ in dist.get_metadata('top_level.txt').splitlines()
+ if p and p not in namespaces]:
path = os.path.join(dist.location, top_level_pkg)
paths_to_remove.add(path)
paths_to_remove.add(path + '.py')
@@ -518,7 +591,10 @@ exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\\r\\n',
fh = open(develop_egg_link, 'r')
link_pointer = os.path.normcase(fh.readline().strip())
fh.close()
- assert (link_pointer == dist.location), 'Egg-link %s does not match installed location of %s (at %s)' % (link_pointer, self.name, dist.location)
+ assert (link_pointer == dist.location), (
+ 'Egg-link %s does not match installed location of %s '
+ '(at %s)' % (link_pointer, self.name, dist.location)
+ )
paths_to_remove.add(develop_egg_link)
easy_install_pth = os.path.join(os.path.dirname(develop_egg_link),
'easy-install.pth')
@@ -541,7 +617,9 @@ exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\\r\\n',
# find console_scripts
if dist.has_metadata('entry_points.txt'):
config = ConfigParser.SafeConfigParser()
- config.readfp(FakeFile(dist.get_metadata_lines('entry_points.txt')))
+ config.readfp(
+ FakeFile(dist.get_metadata_lines('entry_points.txt'))
+ )
if config.has_section('console_scripts'):
for name, value in config.items('console_scripts'):
if dist_in_usersite(dist):
@@ -550,9 +628,15 @@ exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\\r\\n',
bin_dir = bin_py
paths_to_remove.add(os.path.join(bin_dir, name))
if sys.platform == 'win32':
- paths_to_remove.add(os.path.join(bin_dir, name) + '.exe')
- paths_to_remove.add(os.path.join(bin_dir, name) + '.exe.manifest')
- paths_to_remove.add(os.path.join(bin_dir, name) + '-script.py')
+ paths_to_remove.add(
+ os.path.join(bin_dir, name) + '.exe'
+ )
+ paths_to_remove.add(
+ os.path.join(bin_dir, name) + '.exe.manifest'
+ )
+ paths_to_remove.add(
+ os.path.join(bin_dir, name) + '-script.py'
+ )
paths_to_remove.remove(auto_confirm)
self.uninstalled = paths_to_remove
@@ -587,8 +671,10 @@ exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\\r\\n',
os.remove(archive_path)
elif response == 'b':
dest_file = backup_dir(archive_path)
- logger.warn('Backing up %s to %s'
- % (display_path(archive_path), display_path(dest_file)))
+ logger.warn(
+ 'Backing up %s to %s' %
+ (display_path(archive_path), display_path(dest_file))
+ )
shutil.move(archive_path, dest_file)
if create_archive:
zip = zipfile.ZipFile(archive_path, 'w', zipfile.ZIP_DEFLATED)
@@ -600,7 +686,7 @@ exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\\r\\n',
dirname = os.path.join(dirpath, dirname)
name = self._clean_zip_name(dirname, dir)
zipdir = zipfile.ZipInfo(self.name + '/' + name + '/')
- zipdir.external_attr = 0x1ED << 16 # 0o755
+ zipdir.external_attr = 0x1ED << 16 # 0o755
zip.writestr(zipdir, '')
for filename in filenames:
if filename == PIP_DELETE_MARKER_FILENAME:
@@ -634,9 +720,12 @@ exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\\r\\n',
install_args = [sys.executable]
install_args.append('-c')
install_args.append(
- "import setuptools, tokenize;__file__=%r;"\
- "exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\\r\\n', '\\n'), __file__, 'exec'))" % self.setup_py)
- install_args += list(global_options) + ['install','--record', record_filename]
+ "import setuptools, tokenize;__file__=%r;"
+ "exec(compile(getattr(tokenize, 'open', open)(__file__).read()"
+ ".replace('\\r\\n', '\\n'), __file__, 'exec'))" % self.setup_py
+ )
+ install_args += list(global_options) + \
+ ['install', '--record', record_filename]
if not self.as_egg:
install_args += ['--single-version-externally-managed']
@@ -650,16 +739,21 @@ exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\\r\\n',
install_args += ["--no-compile"]
if running_under_virtualenv():
- ## FIXME: I'm not sure if this is a reasonable location; probably not
- ## but we can't put it in the default location, as that is a virtualenv symlink that isn't writable
+ ## FIXME: I'm not sure if this is a reasonable location;
+ ## probably not but we can't put it in the default location, as
+ # that is a virtualenv symlink that isn't writable
install_args += ['--install-headers',
os.path.join(sys.prefix, 'include', 'site',
'python' + get_python_version())]
logger.notify('Running setup.py install for %s' % self.name)
logger.indent += 2
try:
- call_subprocess(install_args + install_options,
- cwd=self.source_dir, filter_stdout=self._filter_install, show_stdout=False)
+ call_subprocess(
+ install_args + install_options,
+ cwd=self.source_dir,
+ filter_stdout=self._filter_install,
+ show_stdout=False,
+ )
finally:
logger.indent -= 2
if not os.path.exists(record_filename):
@@ -667,8 +761,8 @@ exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\\r\\n',
return
self.install_succeeded = True
if self.as_egg:
- # there's no --always-unzip option we can pass to install command
- # so we unable to save the installed-files.txt
+ # there's no --always-unzip option we can pass to install
+                # command so we are unable to save the installed-files.txt
return
def prepend_root(path):
@@ -684,7 +778,10 @@ exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\\r\\n',
egg_info_dir = prepend_root(line)
break
else:
- logger.warn('Could not find .egg-info directory in install record for %s' % self)
+ logger.warn(
+ 'Could not find .egg-info directory in install record for '
+ '%s' % self
+ )
f.close()
## FIXME: put the record somewhere
## FIXME: should this be an error?
@@ -696,7 +793,9 @@ exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\\r\\n',
filename = line.strip()
if os.path.isdir(filename):
filename += os.path.sep
- new_lines.append(make_path_relative(prepend_root(filename), egg_info_dir))
+ new_lines.append(
+ make_path_relative(prepend_root(filename), egg_info_dir)
+ )
f.close()
f = open(os.path.join(egg_info_dir, 'installed-files.txt'), 'w')
f.write('\n'.join(new_lines)+'\n')
@@ -724,9 +823,16 @@ exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\\r\\n',
try:
## FIXME: should we do --install-headers here too?
call_subprocess(
- [sys.executable, '-c',
- "import setuptools, tokenize; __file__=%r; exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\\r\\n', '\\n'), __file__, 'exec'))" % self.setup_py]
- + list(global_options) + ['develop', '--no-deps'] + list(install_options),
+ [
+ sys.executable,
+ '-c',
+ "import setuptools, tokenize; __file__=%r; exec(compile("
+ "getattr(tokenize, 'open', open)(__file__).read().replace"
+ "('\\r\\n', '\\n'), __file__, 'exec'))" % self.setup_py
+ ]
+ + list(global_options)
+ + ['develop', '--no-deps']
+ + list(install_options),
cwd=self.source_dir, filter_stdout=self._filter_install,
show_stdout=False)
@@ -736,11 +842,16 @@ exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\\r\\n',
def _filter_install(self, line):
level = logger.NOTIFY
- for regex in [r'^running .*', r'^writing .*', '^creating .*', '^[Cc]opying .*',
- r'^reading .*', r"^removing .*\.egg-info' \(and everything under it\)$",
- r'^byte-compiling ',
- # Not sure what this warning is, but it seems harmless:
- r"^warning: manifest_maker: standard file '-c' not found$"]:
+ for regex in [
+ r'^running .*',
+ r'^writing .*',
+ '^creating .*',
+ '^[Cc]opying .*',
+ r'^reading .*',
+ r"^removing .*\.egg-info' \(and everything under it\)$",
+ r'^byte-compiling ',
+ # Not sure what this warning is, but it seems harmless:
+ r"^warning: manifest_maker: standard file '-c' not found$"]:
if re.search(regex, line.strip()):
level = logger.INFO
break
@@ -760,21 +871,27 @@ exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\\r\\n',
# run again, and return False, since it would block the uninstall
# TODO: remove this later
if (self.req.project_name == 'setuptools'
- and self.conflicts_with
- and self.conflicts_with.project_name == 'distribute'):
+ and self.conflicts_with
+ and self.conflicts_with.project_name == 'distribute'):
return True
else:
self.satisfied_by = pkg_resources.get_distribution(self.req)
except pkg_resources.DistributionNotFound:
return False
except pkg_resources.VersionConflict:
- existing_dist = pkg_resources.get_distribution(self.req.project_name)
+ existing_dist = pkg_resources.get_distribution(
+ self.req.project_name
+ )
if self.use_user_site:
if dist_in_usersite(existing_dist):
self.conflicts_with = existing_dist
- elif running_under_virtualenv() and dist_in_site_packages(existing_dist):
- raise InstallationError("Will not install to the user site because it will lack sys.path precedence to %s in %s"
- %(existing_dist.project_name, existing_dist.location))
+ elif (running_under_virtualenv()
+ and dist_in_site_packages(existing_dist)):
+ raise InstallationError(
+ "Will not install to the user site because it will "
+ "lack sys.path precedence to %s in %s" %
+ (existing_dist.project_name, existing_dist.location)
+ )
else:
self.conflicts_with = existing_dist
return True
@@ -791,8 +908,10 @@ exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\\r\\n',
if not base:
## FIXME: this doesn't seem right:
return False
- self._is_bundle = (os.path.exists(os.path.join(base, 'pip-manifest.txt'))
- or os.path.exists(os.path.join(base, 'pyinstall-manifest.txt')))
+ self._is_bundle = (
+ os.path.exists(os.path.join(base, 'pip-manifest.txt'))
+ or os.path.exists(os.path.join(base, 'pyinstall-manifest.txt'))
+ )
return self._is_bundle
def bundle_requirements(self):
@@ -819,7 +938,12 @@ exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\\r\\n',
update=False, source_dir=dest_dir, from_bundle=True)
for dest_dir in self._bundle_build_dirs:
package = os.path.basename(dest_dir)
- yield InstallRequirement(package, self,source_dir=dest_dir, from_bundle=True)
+ yield InstallRequirement(
+ package,
+ self,
+ source_dir=dest_dir,
+ from_bundle=True,
+ )
def move_bundle_files(self, dest_build_dir, dest_src_dir):
base = self._temp_build_dir
@@ -829,15 +953,18 @@ exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\\r\\n',
bundle_build_dirs = []
bundle_editable_dirs = []
for source_dir, dest_dir, dir_collection in [
- (src_dir, dest_src_dir, bundle_editable_dirs),
- (build_dir, dest_build_dir, bundle_build_dirs)]:
+ (src_dir, dest_src_dir, bundle_editable_dirs),
+ (build_dir, dest_build_dir, bundle_build_dirs)]:
if os.path.exists(source_dir):
for dirname in os.listdir(source_dir):
dest = os.path.join(dest_dir, dirname)
dir_collection.append(dest)
if os.path.exists(dest):
- logger.warn('The directory %s (containing package %s) already exists; cannot move source from bundle %s'
- % (dest, dirname, self))
+ logger.warn(
+ 'The directory %s (containing package %s) already '
+ 'exists; cannot move source from bundle %s' %
+ (dest, dirname, self)
+ )
continue
if not os.path.exists(dest_dir):
logger.info('Creating directory %s' % dest_dir)
@@ -875,6 +1002,7 @@ def _strip_postfix(req):
req = match.group(1)
return req
+
def _build_req_from_url(url):
parts = [p for p in url.split('#', 1)[0].split('/') if p]
@@ -886,6 +1014,7 @@ def _build_req_from_url(url):
req = parts[-2]
return req
+
def _build_editable_options(req):
"""
@@ -923,13 +1052,22 @@ def parse_editable(editable_req, default_vcs=None):
if os.path.isdir(url_no_extras):
if not os.path.exists(os.path.join(url_no_extras, 'setup.py')):
- raise InstallationError("Directory %r is not installable. File 'setup.py' not found." % url_no_extras)
+ raise InstallationError(
+ "Directory %r is not installable. File 'setup.py' not found." %
+ url_no_extras
+ )
# Treating it as code that has already been checked out
url_no_extras = path_to_url(url_no_extras)
if url_no_extras.lower().startswith('file:'):
if extras:
- return None, url_no_extras, pkg_resources.Requirement.parse('__placeholder__' + extras).extras
+ return (
+ None,
+ url_no_extras,
+ pkg_resources.Requirement.parse(
+ '__placeholder__' + extras
+ ).extras,
+ )
else:
return None, url_no_extras, None
@@ -943,7 +1081,10 @@ def parse_editable(editable_req, default_vcs=None):
url = default_vcs + '+' + url
else:
raise InstallationError(
- '%s should either be a path to a local project or a VCS url beginning with svn+, git+, hg+, or bzr+' % editable_req)
+ '%s should either be a path to a local project or a VCS url '
+ 'beginning with svn+, git+, hg+, or bzr+' %
+ editable_req
+ )
vc_type = url.split('+', 1)[0].lower()
@@ -962,11 +1103,12 @@ def parse_editable(editable_req, default_vcs=None):
if not options or 'egg' not in options:
req = _build_req_from_url(editable_req)
if not req:
- raise InstallationError('--editable=%s is not the right format; it must have #egg=Package' % editable_req)
+ raise InstallationError(
+ '--editable=%s is not the right format; it must have '
+ '#egg=Package' % editable_req
+ )
else:
req = options['egg']
package = _strip_postfix(req)
return package, url, options
-
-
diff --git a/pip/req/req_set.py b/pip/req/req_set.py
index 726d19729..e398e5072 100644
--- a/pip/req/req_set.py
+++ b/pip/req/req_set.py
@@ -1,7 +1,5 @@
import os
import shutil
-import sys
-import textwrap
import zipfile
from pip._vendor import pkg_resources
@@ -11,8 +9,9 @@ from pip.download import (PipSession, url_to_path, unpack_vcs_link, is_vcs_url,
from pip.exceptions import (InstallationError, BestVersionAlreadyInstalled,
DistributionNotFound, PreviousBuildDirError)
from pip.index import Link
-from pip.locations import (PIP_DELETE_MARKER_FILENAME, write_delete_marker_file,
- build_prefix)
+from pip.locations import (
+ PIP_DELETE_MARKER_FILENAME, write_delete_marker_file, build_prefix,
+)
from pip.log import logger
from pip.req.req_install import InstallRequirement
from pip.util import (display_path, rmtree, dist_in_usersite, call_subprocess,
@@ -51,9 +50,10 @@ class Requirements(object):
class RequirementSet(object):
def __init__(self, build_dir, src_dir, download_dir, download_cache=None,
- upgrade=False, ignore_installed=False, as_egg=False, target_dir=None,
- ignore_dependencies=False, force_reinstall=False, use_user_site=False,
- session=None, pycompile=True):
+ upgrade=False, ignore_installed=False, as_egg=False,
+ target_dir=None, ignore_dependencies=False,
+ force_reinstall=False, use_user_site=False, session=None,
+ pycompile=True):
self.build_dir = build_dir
self.src_dir = src_dir
self.download_dir = download_dir
@@ -71,7 +71,7 @@ class RequirementSet(object):
self.reqs_to_cleanup = []
self.as_egg = as_egg
self.use_user_site = use_user_site
- self.target_dir = target_dir #set from --target option
+ self.target_dir = target_dir # set from --target option
self.session = session or PipSession()
self.pycompile = pycompile
@@ -159,9 +159,14 @@ class RequirementSet(object):
req_to_install.check_if_exists()
if req_to_install.satisfied_by:
if self.upgrade:
- #don't uninstall conflict if user install and and conflict is not user install
- if not (self.use_user_site and not dist_in_usersite(req_to_install.satisfied_by)):
- req_to_install.conflicts_with = req_to_install.satisfied_by
+                            # don't uninstall conflict if user install and
+ # conflict is not user install
+ if not (self.use_user_site
+ and not dist_in_usersite(
+ req_to_install.satisfied_by
+ )):
+ req_to_install.conflicts_with = \
+ req_to_install.satisfied_by
req_to_install.satisfied_by = None
else:
install_needed = False
@@ -172,19 +177,28 @@ class RequirementSet(object):
if req_to_install.editable:
if req_to_install.source_dir is None:
- req_to_install.source_dir = req_to_install.build_location(self.src_dir)
+ req_to_install.source_dir = req_to_install.build_location(
+ self.src_dir
+ )
elif install_needed:
- req_to_install.source_dir = req_to_install.build_location(self.build_dir, not self.is_download)
+ req_to_install.source_dir = req_to_install.build_location(
+ self.build_dir,
+ not self.is_download,
+ )
- if req_to_install.source_dir is not None and not os.path.isdir(req_to_install.source_dir):
- raise InstallationError('Could not install requirement %s '
- 'because source folder %s does not exist '
- '(perhaps --no-download was used without first running '
- 'an equivalent install with --no-install?)'
- % (req_to_install, req_to_install.source_dir))
+ if (req_to_install.source_dir is not None
+ and not os.path.isdir(req_to_install.source_dir)):
+ raise InstallationError(
+ 'Could not install requirement %s because source folder %s'
+ ' does not exist (perhaps --no-download was used without '
+ 'first running an equivalent install with --no-install?)' %
+ (req_to_install, req_to_install.source_dir)
+ )
def prepare_files(self, finder, force_root_egg_info=False, bundle=False):
- """Prepare process. Create temp directories, download and/or unpack files."""
+ """
+ Prepare process. Create temp directories, download and/or unpack files.
+ """
unnamed = list(self.unnamed_requirements)
reqs = list(self.requirements.values())
while reqs or unnamed:
@@ -213,9 +227,14 @@ class RequirementSet(object):
req_to_install.url = url.url
if not best_installed:
- #don't uninstall conflict if user install and conflict is not user install
- if not (self.use_user_site and not dist_in_usersite(req_to_install.satisfied_by)):
- req_to_install.conflicts_with = req_to_install.satisfied_by
+ # don't uninstall conflict if user install and
+ # conflict is not user install
+ if not (self.use_user_site
+ and not dist_in_usersite(
+ req_to_install.satisfied_by
+ )):
+ req_to_install.conflicts_with = \
+ req_to_install.satisfied_by
req_to_install.satisfied_by = None
else:
install = False
@@ -230,8 +249,12 @@ class RequirementSet(object):
if req_to_install.editable:
logger.notify('Obtaining %s' % req_to_install)
elif install:
- if req_to_install.url and req_to_install.url.lower().startswith('file:'):
- logger.notify('Unpacking %s' % display_path(url_to_path(req_to_install.url)))
+ if (req_to_install.url
+ and req_to_install.url.lower().startswith('file:')):
+ logger.notify(
+ 'Unpacking %s' %
+ display_path(url_to_path(req_to_install.url))
+ )
else:
logger.notify('Downloading/unpacking %s' % req_to_install)
logger.indent += 2
@@ -258,8 +281,12 @@ class RequirementSet(object):
##occurs when the script attempts to unpack the
##build directory
- # NB: This call can result in the creation of a temporary build directory
- location = req_to_install.build_location(self.build_dir, not self.is_download)
+ # NB: This call can result in the creation of a temporary
+ # build directory
+ location = req_to_install.build_location(
+ self.build_dir,
+ not self.is_download,
+ )
unpack = True
url = None
@@ -271,59 +298,78 @@ class RequirementSet(object):
# inconsistencies are logged later, but do not fail the
# installation.
elif os.path.exists(os.path.join(location, 'setup.py')):
- raise PreviousBuildDirError(textwrap.dedent("""
- pip can't proceed with requirement '%s' due to a pre-existing build directory.
- location: %s
- This is likely due to a previous installation that failed.
- pip is being responsible and not assuming it can delete this.
- Please delete it and try again.
- """ % (req_to_install, location)))
+ raise PreviousBuildDirError(
+                            "pip can't proceed with requirement '%s' due to a"
+                            " pre-existing build directory (%s). This is likely"
+ " due to a previous installation that failed. pip "
+ "is being responsible and not assuming it can "
+ "delete this. Please delete it and try again." %
+ (req_to_install, location)
+ )
else:
- ## FIXME: this won't upgrade when there's an existing package unpacked in `location`
+ ## FIXME: this won't upgrade when there's an existing
+ # package unpacked in `location`
if req_to_install.url is None:
if not_found:
raise not_found
- url = finder.find_requirement(req_to_install, upgrade=self.upgrade)
+ url = finder.find_requirement(
+ req_to_install,
+ upgrade=self.upgrade,
+ )
else:
- ## FIXME: should req_to_install.url already be a link?
+ ## FIXME: should req_to_install.url already be a
+ # link?
url = Link(req_to_install.url)
assert url
if url:
try:
- self.unpack_url(url, location, self.is_download)
+ self.unpack_url(
+ url, location, self.is_download,
+ )
except HTTPError as exc:
- logger.fatal('Could not install requirement %s because of error %s'
- % (req_to_install, exc))
+ logger.fatal(
+ 'Could not install requirement %s because '
+ 'of error %s' % (req_to_install, exc)
+ )
raise InstallationError(
- 'Could not install requirement %s because of HTTP error %s for URL %s'
- % (req_to_install, e, url))
+ 'Could not install requirement %s because '
+ 'of HTTP error %s for URL %s' %
+ (req_to_install, exc, url)
+ )
else:
unpack = False
if unpack:
is_bundle = req_to_install.is_bundle
is_wheel = url and url.filename.endswith('.whl')
if is_bundle:
- req_to_install.move_bundle_files(self.build_dir, self.src_dir)
+ req_to_install.move_bundle_files(
+ self.build_dir,
+ self.src_dir,
+ )
for subreq in req_to_install.bundle_requirements():
reqs.append(subreq)
self.add_requirement(subreq)
elif self.is_download:
req_to_install.source_dir = location
if not is_wheel:
- # FIXME: see https://github.com/pypa/pip/issues/1112
+                        # FIXME: https://github.com/pypa/pip/issues/1112
req_to_install.run_egg_info()
if url and url.scheme in vcs.all_schemes:
req_to_install.archive(self.download_dir)
elif is_wheel:
req_to_install.source_dir = location
req_to_install.url = url.url
- dist = list(pkg_resources.find_distributions(location))[0]
+ dist = list(
+ pkg_resources.find_distributions(location)
+ )[0]
if not req_to_install.req:
req_to_install.req = dist.as_requirement()
self.add_requirement(req_to_install)
if not self.ignore_dependencies:
- for subreq in dist.requires(req_to_install.extras):
- if self.has_requirement(subreq.project_name):
+ for subreq in dist.requires(
+ req_to_install.extras):
+ if self.has_requirement(
+ subreq.project_name):
continue
subreq = InstallRequirement(str(subreq),
req_to_install)
@@ -333,39 +379,64 @@ class RequirementSet(object):
req_to_install.source_dir = location
req_to_install.run_egg_info()
if force_root_egg_info:
- # We need to run this to make sure that the .egg-info/
- # directory is created for packing in the bundle
- req_to_install.run_egg_info(force_root_egg_info=True)
+ # We need to run this to make sure that the
+ # .egg-info/ directory is created for packing
+ # in the bundle
+ req_to_install.run_egg_info(
+ force_root_egg_info=True,
+ )
req_to_install.assert_source_matches_version()
- #@@ sketchy way of identifying packages not grabbed from an index
+ #@@ sketchy way of identifying packages not grabbed
+ # from an index
if bundle and req_to_install.url:
self.copy_to_build_dir(req_to_install)
install = False
- # req_to_install.req is only avail after unpack for URL pkgs
- # repeat check_if_exists to uninstall-on-upgrade (#14)
+ # req_to_install.req is only avail after unpack for URL
+ # pkgs repeat check_if_exists to uninstall-on-upgrade
+ # (#14)
if not self.ignore_installed:
req_to_install.check_if_exists()
if req_to_install.satisfied_by:
if self.upgrade or self.ignore_installed:
- #don't uninstall conflict if user install and and conflict is not user install
- if not (self.use_user_site and not dist_in_usersite(req_to_install.satisfied_by)):
- req_to_install.conflicts_with = req_to_install.satisfied_by
+ # don't uninstall conflict if user install and
+ # conflict is not user install
+ if not (self.use_user_site
+ and not dist_in_usersite(
+ req_to_install.satisfied_by)):
+ req_to_install.conflicts_with = \
+ req_to_install.satisfied_by
req_to_install.satisfied_by = None
else:
- logger.notify('Requirement already satisfied (use --upgrade to upgrade): %s' % req_to_install)
+ logger.notify(
+ 'Requirement already satisfied (use '
+ '--upgrade to upgrade): %s' %
+ req_to_install
+ )
install = False
if not (is_bundle or is_wheel):
## FIXME: shouldn't be globally added:
- finder.add_dependency_links(req_to_install.dependency_links)
+ finder.add_dependency_links(
+ req_to_install.dependency_links
+ )
if (req_to_install.extras):
- logger.notify("Installing extra requirements: %r" % ','.join(req_to_install.extras))
+ logger.notify(
+ "Installing extra requirements: %r" %
+ ','.join(req_to_install.extras)
+ )
if not self.ignore_dependencies:
- for req in req_to_install.requirements(req_to_install.extras):
+ for req in req_to_install.requirements(
+ req_to_install.extras):
try:
- name = pkg_resources.Requirement.parse(req).project_name
+ name = pkg_resources.Requirement.parse(
+ req
+ ).project_name
except ValueError as exc:
## FIXME: proper warning
- logger.error('Invalid requirement: %r (%s) in requirement %s' % (req, exc, req_to_install))
+ logger.error(
+ 'Invalid requirement: %r (%s) in '
+ 'requirement %s' %
+ (req, exc, req_to_install)
+ )
continue
if self.has_requirement(name):
## FIXME: check for conflict
@@ -376,14 +447,19 @@ class RequirementSet(object):
if not self.has_requirement(req_to_install.name):
#'unnamed' requirements will get added here
self.add_requirement(req_to_install)
- if self.is_download or req_to_install._temp_build_dir is not None:
+ if (self.is_download
+ or req_to_install._temp_build_dir is not None):
self.reqs_to_cleanup.append(req_to_install)
else:
self.reqs_to_cleanup.append(req_to_install)
if install:
self.successfully_downloaded.append(req_to_install)
- if bundle and (req_to_install.url and req_to_install.url.startswith('file:///')):
+ if (bundle
+ and (
+ req_to_install.url
+ and req_to_install.url.startswith('file:///')
+ )):
self.copy_to_build_dir(req_to_install)
finally:
logger.indent -= 2
@@ -412,8 +488,12 @@ class RequirementSet(object):
logger.indent -= 2
def _pip_has_created_build_dir(self):
- return (self.build_dir == build_prefix and
- os.path.exists(os.path.join(self.build_dir, PIP_DELETE_MARKER_FILENAME)))
+ return (
+ self.build_dir == build_prefix
+ and os.path.exists(
+ os.path.join(self.build_dir, PIP_DELETE_MARKER_FILENAME)
+ )
+ )
def copy_to_build_dir(self, req_to_install):
target_dir = req_to_install.editable and self.src_dir or self.build_dir
@@ -436,13 +516,22 @@ class RequirementSet(object):
else:
if self.download_cache:
self.download_cache = os.path.expanduser(self.download_cache)
- retval = unpack_http_url(link, location, self.download_cache, self.download_dir, self.session)
+ retval = unpack_http_url(
+ link,
+ location,
+ self.download_cache,
+ self.download_dir,
+ self.session,
+ )
if only_download:
write_delete_marker_file(location)
return retval
def install(self, install_options, global_options=(), *args, **kwargs):
- """Install everything in this set (after having downloaded and unpacked the packages)"""
+ """
+ Install everything in this set (after having downloaded and unpacked
+ the packages)
+ """
to_install = [r for r in self.requirements.values()
if not r.satisfied_by]
@@ -454,12 +543,16 @@ class RequirementSet(object):
# TODO: take this out later
distribute_req = pkg_resources.Requirement.parse("distribute>=0.7")
for req in to_install:
- if req.name == 'distribute' and req.installed_version in distribute_req:
+ if (req.name == 'distribute'
+ and req.installed_version in distribute_req):
to_install.remove(req)
to_install.append(req)
if to_install:
- logger.notify('Installing collected packages: %s' % ', '.join([req.name for req in to_install]))
+ logger.notify(
+ 'Installing collected packages: %s' %
+ ', '.join([req.name for req in to_install])
+ )
logger.indent += 2
try:
for requirement in to_install:
@@ -468,19 +561,22 @@ class RequirementSet(object):
# when upgrading from distribute-0.6.X to the new merged
# setuptools in py2, we need to force setuptools to uninstall
# distribute. In py3, which is always using distribute, this
- # conversion is already happening in distribute's pkg_resources.
- # It's ok *not* to check if setuptools>=0.7 because if someone
- # were actually trying to ugrade from distribute to setuptools
- # 0.6.X, then all this could do is actually help, although that
- # upgade path was certainly never "supported"
+ # conversion is already happening in distribute's
+ # pkg_resources. It's ok *not* to check if setuptools>=0.7
+                    # because if someone were actually trying to upgrade from
+ # distribute to setuptools 0.6.X, then all this could do is
+                    # actually help, although that upgrade path was certainly
+                    # never "supported"
# TODO: remove this later
if requirement.name == 'setuptools':
try:
# only uninstall distribute<0.7. For >=0.7, setuptools
# will also be present, and that's what we need to
# uninstall
- distribute_requirement = pkg_resources.Requirement.parse("distribute<0.7")
- existing_distribute = pkg_resources.get_distribution("distribute")
+ distribute_requirement = \
+ pkg_resources.Requirement.parse("distribute<0.7")
+ existing_distribute = \
+ pkg_resources.get_distribution("distribute")
if existing_distribute in distribute_requirement:
requirement.conflicts_with = existing_distribute
except pkg_resources.DistributionNotFound:
@@ -496,14 +592,21 @@ class RequirementSet(object):
finally:
logger.indent -= 2
try:
- requirement.install(install_options, global_options, *args, **kwargs)
+ requirement.install(
+ install_options,
+ global_options,
+ *args,
+ **kwargs
+ )
except:
# if install did not succeed, rollback previous uninstall
- if requirement.conflicts_with and not requirement.install_succeeded:
+ if (requirement.conflicts_with
+ and not requirement.install_succeeded):
requirement.rollback_uninstall()
raise
else:
- if requirement.conflicts_with and requirement.install_succeeded:
+ if (requirement.conflicts_with
+ and requirement.install_succeeded):
requirement.commit_uninstall()
requirement.remove_temporary_source()
finally:
@@ -572,7 +675,10 @@ class RequirementSet(object):
for req in [req for req in self.requirements.values()
if not req.comes_from]:
parts.append('%s==%s\n' % (req.name, req.installed_version))
- parts.append('# These packages were installed to satisfy the above requirements:\n')
+ parts.append(
+ '# These packages were installed to satisfy the above '
+ 'requirements:\n'
+ )
for req in [req for req in self.requirements.values()
if req.comes_from]:
parts.append('%s==%s\n' % (req.name, req.installed_version))
diff --git a/pip/req/req_uninstall.py b/pip/req/req_uninstall.py
index 02f94e03d..6b4f4e142 100644
--- a/pip/req/req_uninstall.py
+++ b/pip/req/req_uninstall.py
@@ -31,8 +31,14 @@ class UninstallPathSet(object):
def _can_uninstall(self):
if not dist_is_local(self.dist):
- logger.notify("Not uninstalling %s at %s, outside environment %s"
- % (self.dist.project_name, normalize_path(self.dist.location), sys.prefix))
+ logger.notify(
+ "Not uninstalling %s at %s, outside environment %s" %
+ (
+ self.dist.project_name,
+ normalize_path(self.dist.location),
+ sys.prefix
+ ),
+ )
return False
return True
@@ -45,11 +51,11 @@ class UninstallPathSet(object):
else:
self._refuse.add(path)
- # __pycache__ files can show up after 'installed-files.txt' is created, due to imports
+ # __pycache__ files can show up after 'installed-files.txt' is created,
+ # due to imports
if os.path.splitext(path)[1] == '.py' and uses_pycache:
self.add(imp.cache_from_source(path))
-
def add_pth(self, pth_file, entry):
pth_file = normalize_path(pth_file)
if self._permitted(pth_file):
@@ -66,9 +72,10 @@ class UninstallPathSet(object):
shorter path."""
short_paths = set()
for path in sorted(paths, key=len):
- if not any([(path.startswith(shortpath) and
- path[len(shortpath.rstrip(os.path.sep))] == os.path.sep)
- for shortpath in short_paths]):
+ if not any([
+ (path.startswith(shortpath) and
+ path[len(shortpath.rstrip(os.path.sep))] == os.path.sep)
+ for shortpath in short_paths]):
short_paths.add(path)
return short_paths
@@ -82,7 +89,10 @@ class UninstallPathSet(object):
if not self._can_uninstall():
return
if not self.paths:
- logger.notify("Can't uninstall '%s'. No files were found to uninstall." % self.dist.project_name)
+ logger.notify(
+ "Can't uninstall '%s'. No files were found to uninstall." %
+ self.dist.project_name
+ )
return
logger.notify('Uninstalling %s:' % self.dist.project_name)
logger.indent += 2
@@ -108,7 +118,9 @@ class UninstallPathSet(object):
renames(path, new_path)
for pth in self.pth.values():
pth.remove()
- logger.notify('Successfully uninstalled %s' % self.dist.project_name)
+ logger.notify(
+ 'Successfully uninstalled %s' % self.dist.project_name
+ )
finally:
logger.indent -= 2
@@ -116,7 +128,10 @@ class UninstallPathSet(object):
def rollback(self):
"""Rollback the changes previously made by remove()."""
if self.save_dir is None:
- logger.error("Can't roll back %s; was not uninstalled" % self.dist.project_name)
+ logger.error(
+ "Can't roll back %s; was not uninstalled" %
+ self.dist.project_name
+ )
return False
logger.notify('Rolling back uninstall of %s' % self.dist.project_name)
for path in self._moved_paths:
@@ -137,7 +152,9 @@ class UninstallPathSet(object):
class UninstallPthEntries(object):
def __init__(self, pth_file):
if not os.path.isfile(pth_file):
- raise UninstallationError("Cannot remove entries from nonexistent file %s" % pth_file)
+ raise UninstallationError(
+ "Cannot remove entries from nonexistent file %s" % pth_file
+ )
self.file = pth_file
self.entries = set()
self._saved_lines = None
@@ -175,11 +192,12 @@ class UninstallPthEntries(object):
def rollback(self):
if self._saved_lines is None:
- logger.error('Cannot roll back changes to %s, none were made' % self.file)
+ logger.error(
+ 'Cannot roll back changes to %s, none were made' % self.file
+ )
return False
logger.info('Rolling %s back to previous state' % self.file)
fh = open(self.file, 'wb')
fh.writelines(self._saved_lines)
fh.close()
return True
-
diff --git a/pip/util.py b/pip/util.py
index 106f0dd59..6cb5b1871 100644
--- a/pip/util.py
+++ b/pip/util.py
@@ -8,13 +8,16 @@ import posixpath
import zipfile
import tarfile
import subprocess
-import textwrap
-from pip.exceptions import InstallationError, BadCommand, PipError
-from pip.backwardcompat import(WindowsError, string_types, raw_input,
- console_to_str, user_site, PermissionError)
-from pip.locations import (site_packages, running_under_virtualenv, virtualenv_no_global,
- write_delete_marker_file)
+from pip.exceptions import InstallationError, BadCommand
+from pip.backwardcompat import (
+ WindowsError, string_types, raw_input, console_to_str, user_site,
+ PermissionError,
+)
+from pip.locations import (
+ site_packages, running_under_virtualenv, virtualenv_no_global,
+ write_delete_marker_file,
+)
from pip.log import logger
from pip._vendor import pkg_resources
from pip._vendor.distlib import version
@@ -50,9 +53,9 @@ def rmtree_errorhandler(func, path, exc_info):
remove them, an exception is thrown. We catch that here, remove the
read-only attribute, and hopefully continue without problems."""
exctype, value = exc_info[:2]
- if not ((exctype is WindowsError and value.args[0] == 5) or #others
- (exctype is OSError and value.args[0] == 13) or #python2.4
- (exctype is PermissionError and value.args[3] == 5) #python3.3
+ if not ((exctype is WindowsError and value.args[0] == 5) or # others
+ (exctype is OSError and value.args[0] == 13) or # python2.4
+ (exctype is PermissionError and value.args[3] == 5) # python3.3
):
raise
# file type should currently be read only
@@ -130,12 +133,17 @@ def ask(message, options):
"""Ask the message interactively, with the given possible responses"""
while 1:
if os.environ.get('PIP_NO_INPUT'):
- raise Exception('No input was expected ($PIP_NO_INPUT set); question: %s' % message)
+ raise Exception(
+ 'No input was expected ($PIP_NO_INPUT set); question: %s' %
+ message
+ )
response = raw_input(message)
response = response.strip().lower()
if response not in options:
- print('Your response (%r) was not one of the expected responses: %s' % (
- response, ', '.join(options)))
+ print(
+ 'Your response (%r) was not one of the expected responses: '
+ '%s' % (response, ', '.join(options))
+ )
else:
return response
@@ -168,7 +176,7 @@ class _Inf(object):
return 'Inf'
-Inf = _Inf() #this object is not currently used as a sortable in our code
+Inf = _Inf() # this object is not currently used as a sortable in our code
del _Inf
@@ -201,7 +209,9 @@ def is_installable_dir(path):
def is_svn_page(html):
- """Returns true if the page appears to be the index page of an svn repository"""
+ """
+ Returns true if the page appears to be the index page of an svn repository
+ """
return (re.search(r'[^<]*Revision \d+:', html)
and re.search(r'Powered by (?:]*?>)?Subversion', html, re.I))
@@ -246,13 +256,13 @@ def make_path_relative(path, rel_to):
Make a filename relative, where the filename path, and it is
relative to rel_to
- >>> make_relative_path('/usr/share/something/a-file.pth',
+ >>> make_path_relative('/usr/share/something/a-file.pth',
... '/usr/share/another-place/src/Directory')
'../../../something/a-file.pth'
- >>> make_relative_path('/usr/share/something/a-file.pth',
+ >>> make_path_relative('/usr/share/something/a-file.pth',
... '/home/user/src/Directory')
'../../../usr/share/something/a-file.pth'
- >>> make_relative_path('/usr/share/a-file.pth', '/usr/share/')
+ >>> make_path_relative('/usr/share/a-file.pth', '/usr/share/')
'a-file.pth'
"""
path_filename = os.path.basename(path)
@@ -332,15 +342,21 @@ def dist_in_usersite(dist):
Return True if given Distribution is installed in user site.
"""
if user_site:
- return normalize_path(dist_location(dist)).startswith(normalize_path(user_site))
+ return normalize_path(
+ dist_location(dist)
+ ).startswith(normalize_path(user_site))
else:
return False
+
def dist_in_site_packages(dist):
"""
- Return True if given Distribution is installed in distutils.sysconfig.get_python_lib().
+ Return True if given Distribution is installed in
+ distutils.sysconfig.get_python_lib().
"""
- return normalize_path(dist_location(dist)).startswith(normalize_path(site_packages))
+ return normalize_path(
+ dist_location(dist)
+ ).startswith(normalize_path(site_packages))
def dist_is_editable(dist):
@@ -350,10 +366,11 @@ def dist_is_editable(dist):
req = FrozenRequirement.from_dist(dist, [])
return req.editable
-def get_installed_distributions(local_only=True,
- skip=('setuptools', 'pip', 'python', 'distribute', 'wsgiref'),
- include_editables=True,
- editables_only=False):
+
+def get_installed_distributions(
+ local_only=True,
+ skip=('setuptools', 'pip', 'python', 'distribute', 'wsgiref'),
+ include_editables=True, editables_only=False):
"""
Return a list of installed Distribution objects.
@@ -402,9 +419,12 @@ def egg_link_path(dist):
2) in a no-global virtualenv
try to find in site_packages
3) in a yes-global virtualenv
- try to find in site_packages, then site.USER_SITE (don't look in global location)
+ try to find in site_packages, then site.USER_SITE
+ (don't look in global location)
+
+ For #1 and #3, there could be odd cases, where there's an egg-link in 2
+ locations.
- For #1 and #3, there could be odd cases, where there's an egg-link in 2 locations.
This method will just return the first one found.
"""
sites = []
@@ -448,8 +468,10 @@ def get_terminal_size():
import fcntl
import termios
import struct
- cr = struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ,
- '1234'))
+ cr = struct.unpack(
+ 'hh',
+ fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234')
+ )
except:
return None
if cr == (0, 0):
@@ -513,10 +535,11 @@ def unzip_file(filename, location, flatten=True):
finally:
fp.close()
mode = info.external_attr >> 16
- # if mode and regular file and any execute permissions for user/group/world?
- if mode and stat.S_ISREG(mode) and mode & 0o111:
- # make dest file have execute for user/group/world (chmod +x)
- # no-op on windows per python docs
+ # if mode and regular file and any execute permissions for
+ # user/group/world?
+ if mode and stat.S_ISREG(mode) and mode & 0o111:
+ # make dest file have execute for user/group/world
+ # (chmod +x) no-op on windows per python docs
os.chmod(fn, (0o777-current_umask() | 0o111))
finally:
zipfp.close()
@@ -535,7 +558,8 @@ def untar_file(filename, location):
os.makedirs(location)
if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
mode = 'r:gz'
- elif filename.lower().endswith('.bz2') or filename.lower().endswith('.tbz'):
+ elif (filename.lower().endswith('.bz2')
+ or filename.lower().endswith('.tbz')):
mode = 'r:bz2'
elif filename.lower().endswith('.tar'):
mode = 'r'
@@ -604,7 +628,9 @@ def create_download_cache_folder(folder):
def cache_download(target_file, temp_location, content_type):
- logger.notify('Storing download in cache at %s' % display_path(target_file))
+ logger.notify(
+ 'Storing download in cache at %s' % display_path(target_file)
+ )
shutil.copyfile(temp_location, target_file)
fp = open(target_file+'.content-type', 'w')
fp.write(content_type)
@@ -614,26 +640,36 @@ def cache_download(target_file, temp_location, content_type):
def unpack_file(filename, location, content_type, link):
filename = os.path.realpath(filename)
if (content_type == 'application/zip'
- or filename.endswith('.zip')
- or filename.endswith('.pybundle')
- or filename.endswith('.whl')
- or zipfile.is_zipfile(filename)):
- unzip_file(filename, location, flatten=not filename.endswith(('.pybundle', '.whl')))
+ or filename.endswith('.zip')
+ or filename.endswith('.pybundle')
+ or filename.endswith('.whl')
+ or zipfile.is_zipfile(filename)):
+ unzip_file(
+ filename,
+ location,
+ flatten=not filename.endswith(('.pybundle', '.whl'))
+ )
elif (content_type == 'application/x-gzip'
- or tarfile.is_tarfile(filename)
- or splitext(filename)[1].lower() in ('.tar', '.tar.gz', '.tar.bz2', '.tgz', '.tbz')):
+ or tarfile.is_tarfile(filename)
+ or splitext(filename)[1].lower() in (
+ '.tar', '.tar.gz', '.tar.bz2', '.tgz', '.tbz')):
untar_file(filename, location)
elif (content_type and content_type.startswith('text/html')
- and is_svn_page(file_contents(filename))):
+ and is_svn_page(file_contents(filename))):
# We don't really care about this
from pip.vcs.subversion import Subversion
Subversion('svn+' + link.url).unpack(location)
else:
## FIXME: handle?
## FIXME: magic signatures?
- logger.fatal('Cannot unpack file %s (downloaded from %s, content-type: %s); cannot detect archive format'
- % (filename, location, content_type))
- raise InstallationError('Cannot determine archive format of %s' % location)
+ logger.fatal(
+ 'Cannot unpack file %s (downloaded from %s, content-type: %s); '
+ 'cannot detect archive format' %
+ (filename, location, content_type)
+ )
+ raise InstallationError(
+ 'Cannot determine archive format of %s' % location
+ )
def call_subprocess(cmd, show_stdout=True,
@@ -689,8 +725,13 @@ def call_subprocess(cmd, show_stdout=True,
if proc.returncode:
if raise_on_returncode:
if all_output:
- logger.notify('Complete output from command %s:' % command_desc)
- logger.notify('\n'.join(all_output) + '\n----------------------------------------')
+ logger.notify(
+ 'Complete output from command %s:' % command_desc
+ )
+ logger.notify(
+ '\n'.join(all_output) +
+ '\n----------------------------------------'
+ )
raise InstallationError(
"Command %s failed with error code %s in %s"
% (command_desc, proc.returncode, cwd))
@@ -716,7 +757,11 @@ def is_prerelease(vers):
return True
parsed = version._normalized_key(normalized)
- return any([any([y in set(["a", "b", "c", "rc", "dev"]) for y in x]) for x in parsed])
+ return any([
+ any([y in set(["a", "b", "c", "rc", "dev"]) for y in x])
+ for x in parsed
+ ])
+
def read_text_file(filename):
"""Return the contents of *filename*.
diff --git a/pip/vcs/__init__.py b/pip/vcs/__init__.py
index a56dd202b..87f49c730 100644
--- a/pip/vcs/__init__.py
+++ b/pip/vcs/__init__.py
@@ -17,7 +17,8 @@ class VcsSupport(object):
schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn']
def __init__(self):
- # Register more schemes with urlparse for various version control systems
+ # Register more schemes with urlparse for various version control
+ # systems
urlparse.uses_netloc.extend(self.schemes)
# Python >= 2.7.4, 3.3 doesn't have uses_fragment
if getattr(urlparse, 'uses_fragment', None):
@@ -118,9 +119,10 @@ class VersionControl(object):
repository URL
"""
error_message = (
- "Sorry, '%s' is a malformed VCS url. "
- "The format is +://, "
- "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp")
+ "Sorry, '%s' is a malformed VCS url. "
+ "The format is +://, "
+ "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp"
+ )
assert '+' in self.url, error_message % self.url
url = self.url.split('+', 1)[1]
scheme, netloc, path, query, frag = urlparse.urlsplit(url)
@@ -134,12 +136,14 @@ class VersionControl(object):
"""
Returns (url, revision), where both are strings
"""
- assert not location.rstrip('/').endswith(self.dirname), 'Bad directory: %s' % location
+ assert not location.rstrip('/').endswith(self.dirname), \
+ 'Bad directory: %s' % location
return self.get_url(location), self.get_revision(location)
def normalize_url(self, url):
"""
- Normalize a URL for comparison by unquoting it and removing any trailing slash.
+ Normalize a URL for comparison by unquoting it and removing any
+ trailing slash.
"""
return urllib.unquote(url).rstrip('/')
@@ -247,5 +251,8 @@ def get_src_requirement(dist, location, find_tags):
version_control = vcs.get_backend_from_location(location)
if version_control:
return version_control().get_src_requirement(dist, location, find_tags)
- logger.warn('cannot determine version of editable source in %s (is not SVN checkout, Git clone, Mercurial clone or Bazaar branch)' % location)
+ logger.warn(
+ 'cannot determine version of editable source in %s (is not SVN '
+ 'checkout, Git clone, Mercurial clone or Bazaar branch)' % location
+ )
return dist.as_requirement()
diff --git a/pip/vcs/bazaar.py b/pip/vcs/bazaar.py
index c62c9c85a..23f194440 100644
--- a/pip/vcs/bazaar.py
+++ b/pip/vcs/bazaar.py
@@ -13,7 +13,10 @@ class Bazaar(VersionControl):
dirname = '.bzr'
repo_name = 'branch'
bundle_file = 'bzr-branch.txt'
- schemes = ('bzr', 'bzr+http', 'bzr+https', 'bzr+ssh', 'bzr+sftp', 'bzr+ftp', 'bzr+lp')
+ schemes = (
+ 'bzr', 'bzr+http', 'bzr+https', 'bzr+ssh', 'bzr+sftp', 'bzr+ftp',
+ 'bzr+lp',
+ )
guide = ('# This was a Bazaar branch; to make it a branch again run:\n'
'bzr branch -r %(rev)s %(url)s .\n')
@@ -39,7 +42,9 @@ class Bazaar(VersionControl):
return None, None
def export(self, location):
- """Export the Bazaar repository at the url to the destination location"""
+ """
+ Export the Bazaar repository at the url to the destination location
+ """
temp_dir = tempfile.mkdtemp('-export', 'pip-')
self.unpack(temp_dir)
if os.path.exists(location):
diff --git a/pip/vcs/git.py b/pip/vcs/git.py
index 16acebdc4..54951764d 100644
--- a/pip/vcs/git.py
+++ b/pip/vcs/git.py
@@ -14,10 +14,13 @@ class Git(VersionControl):
name = 'git'
dirname = '.git'
repo_name = 'clone'
- schemes = ('git', 'git+http', 'git+https', 'git+ssh', 'git+git', 'git+file')
+ schemes = (
+ 'git', 'git+http', 'git+https', 'git+ssh', 'git+git', 'git+file',
+ )
bundle_file = 'git-clone.txt'
guide = ('# This was a Git repo; to make it a repo again run:\n'
- 'git init\ngit remote add origin %(url)s -f\ngit checkout %(rev)s\n')
+ 'git init\ngit remote add origin %(url)s -f\ngit '
+ 'checkout %(rev)s\n')
def __init__(self, url=None, *args, **kwargs):
@@ -27,10 +30,15 @@ class Git(VersionControl):
scheme, netloc, path, query, fragment = urlsplit(url)
if scheme.endswith('file'):
initial_slashes = path[:-len(path.lstrip('/'))]
- newpath = initial_slashes + url2pathname(path).replace('\\', '/').lstrip('/')
+ newpath = (
+ initial_slashes +
+ url2pathname(path).replace('\\', '/').lstrip('/')
+ )
url = urlunsplit((scheme, netloc, newpath, query, fragment))
after_plus = scheme.find('+') + 1
- url = scheme[:after_plus] + urlunsplit((scheme[after_plus:], netloc, newpath, query, fragment))
+ url = scheme[:after_plus] + urlunsplit(
+ (scheme[after_plus:], netloc, newpath, query, fragment),
+ )
super(Git, self).__init__(url, *args, **kwargs)
@@ -39,7 +47,10 @@ class Git(VersionControl):
for line in content.splitlines():
if not line.strip() or line.strip().startswith('#'):
continue
- url_match = re.search(r'git\s*remote\s*add\s*origin(.*)\s*-f', line)
+ url_match = re.search(
+ r'git\s*remote\s*add\s*origin(.*)\s*-f',
+ line,
+ )
if url_match:
url = url_match.group(1).strip()
rev_match = re.search(r'^git\s*checkout\s*-q\s*(.*)\s*', line)
@@ -77,7 +88,9 @@ class Git(VersionControl):
# a local tag or branch name
return [revisions[rev]]
else:
- logger.warn("Could not find a tag or branch '%s', assuming commit." % rev)
+ logger.warn(
+ "Could not find a tag or branch '%s', assuming commit." % rev,
+ )
return rev_options
def switch(self, dest, url, rev_options):
@@ -93,8 +106,13 @@ class Git(VersionControl):
call_subprocess([self.cmd, 'fetch', '-q'], cwd=dest)
# Then reset to wanted revision (maby even origin/master)
if rev_options:
- rev_options = self.check_rev_options(rev_options[0], dest, rev_options)
- call_subprocess([self.cmd, 'reset', '--hard', '-q'] + rev_options, cwd=dest)
+ rev_options = self.check_rev_options(
+ rev_options[0], dest, rev_options,
+ )
+ call_subprocess(
+ [self.cmd, 'reset', '--hard', '-q'] + rev_options,
+ cwd=dest,
+ )
#: update submodules
self.update_submodules(dest)
@@ -107,7 +125,9 @@ class Git(VersionControl):
rev_options = ['origin/master']
rev_display = ''
if self.check_destination(dest, url, rev_options, rev_display):
- logger.notify('Cloning %s%s to %s' % (url, rev_display, display_path(dest)))
+ logger.notify(
+ 'Cloning %s%s to %s' % (url, rev_display, display_path(dest)),
+ )
call_subprocess([self.cmd, 'clone', '-q', url, dest])
#: repo may contain submodules
self.update_submodules(dest)
@@ -115,7 +135,10 @@ class Git(VersionControl):
rev_options = self.check_rev_options(rev, dest, rev_options)
# Only do a checkout if rev_options differs from HEAD
if not self.get_revision(dest).startswith(rev_options[0]):
- call_subprocess([self.cmd, 'checkout', '-q'] + rev_options, cwd=dest)
+ call_subprocess(
+ [self.cmd, 'checkout', '-q'] + rev_options,
+ cwd=dest,
+ )
def get_url(self, location):
url = call_subprocess(
@@ -162,7 +185,9 @@ class Git(VersionControl):
if current_rev in names_by_commit:
# It's a tag
- full_egg_name = '%s-%s' % (egg_project_name, names_by_commit[current_rev])
+ full_egg_name = (
+ '%s-%s' % (egg_project_name, names_by_commit[current_rev])
+ )
else:
full_egg_name = '%s-dev' % egg_project_name
@@ -188,7 +213,9 @@ class Git(VersionControl):
def update_submodules(self, location):
if not os.path.exists(os.path.join(location, '.gitmodules')):
return
- call_subprocess([self.cmd, 'submodule', 'update', '--init', '--recursive', '-q'],
- cwd=location)
+ call_subprocess(
+ [self.cmd, 'submodule', 'update', '--init', '--recursive', '-q'],
+ cwd=location,
+ )
vcs.register(Git)
diff --git a/pip/vcs/mercurial.py b/pip/vcs/mercurial.py
index b3859750d..75975585a 100644
--- a/pip/vcs/mercurial.py
+++ b/pip/vcs/mercurial.py
@@ -1,7 +1,7 @@
import os
import tempfile
import re
-import sys
+
from pip.util import call_subprocess
from pip.util import display_path, rmtree
from pip.log import logger
@@ -17,7 +17,7 @@ class Mercurial(VersionControl):
schemes = ('hg', 'hg+http', 'hg+https', 'hg+ssh', 'hg+static-http')
bundle_file = 'hg-clone.txt'
guide = ('# This was a Mercurial repo; to make it a repo again run:\n'
- 'hg init\nhg pull %(url)s\nhg update -r %(rev)s\n')
+ 'hg init\nhg pull %(url)s\nhg update -r %(rev)s\n')
def parse_vcs_bundle_file(self, content):
url = rev = None
@@ -142,7 +142,10 @@ class Mercurial(VersionControl):
full_egg_name = '%s-%s' % (egg_project_name, tag_revs[current_rev])
elif current_rev in branch_revs:
# It's the tip of a branch
- full_egg_name = '%s-%s' % (egg_project_name, branch_revs[current_rev])
+ full_egg_name = '%s-%s' % (
+ egg_project_name,
+ branch_revs[current_rev],
+ )
else:
full_egg_name = '%s-dev' % egg_project_name
return '%s@%s#egg=%s' % (repo, current_rev_hash, full_egg_name)
diff --git a/pip/vcs/subversion.py b/pip/vcs/subversion.py
index 88163ff73..468c1a8fa 100644
--- a/pip/vcs/subversion.py
+++ b/pip/vcs/subversion.py
@@ -21,22 +21,32 @@ class Subversion(VersionControl):
schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https', 'svn+svn')
bundle_file = 'svn-checkout.txt'
guide = ('# This was an svn checkout; to make it a checkout again run:\n'
- 'svn checkout --force -r %(rev)s %(url)s .\n')
+ 'svn checkout --force -r %(rev)s %(url)s .\n')
def get_info(self, location):
"""Returns (url, revision), where both are strings"""
- assert not location.rstrip('/').endswith(self.dirname), 'Bad directory: %s' % location
+ assert not location.rstrip('/').endswith(self.dirname), \
+ 'Bad directory: %s' % location
output = call_subprocess(
- [self.cmd, 'info', location], show_stdout=False, extra_environ={'LANG': 'C'})
+ [self.cmd, 'info', location],
+ show_stdout=False,
+ extra_environ={'LANG': 'C'},
+ )
match = _svn_url_re.search(output)
if not match:
- logger.warn('Cannot determine URL of svn checkout %s' % display_path(location))
+ logger.warn(
+ 'Cannot determine URL of svn checkout %s' %
+ display_path(location)
+ )
logger.info('Output that cannot be parsed: \n%s' % output)
return None, None
url = match.group(1).strip()
match = _svn_revision_re.search(output)
if not match:
- logger.warn('Cannot determine revision of svn checkout %s' % display_path(location))
+ logger.warn(
+ 'Cannot determine revision of svn checkout %s' %
+ display_path(location)
+ )
logger.info('Output that cannot be parsed: \n%s' % output)
return url, None
return url, match.group(1)
@@ -61,8 +71,8 @@ class Subversion(VersionControl):
logger.indent += 2
try:
if os.path.exists(location):
- # Subversion doesn't like to check out over an existing directory
- # --force fixes this, but was only added in svn 1.5
+ # Subversion doesn't like to check out over an existing
+ # directory --force fixes this, but was only added in svn 1.5
rmtree(location)
call_subprocess(
[self.cmd, 'export'] + rev_options + [url, location],
@@ -140,16 +150,21 @@ class Subversion(VersionControl):
return url, rev
def get_url(self, location):
- # In cases where the source is in a subdirectory, not alongside setup.py
- # we have to look up in the location until we find a real setup.py
+ # In cases where the source is in a subdirectory, not alongside
+ # setup.py we have to look up in the location until we find a real
+ # setup.py
orig_location = location
while not os.path.exists(os.path.join(location, 'setup.py')):
last_location = location
location = os.path.dirname(location)
if location == last_location:
- # We've traversed up to the root of the filesystem without finding setup.py
- logger.warn("Could not find setup.py for directory %s (tried all parent directories)"
- % orig_location)
+ # We've traversed up to the root of the filesystem without
+ # finding setup.py
+ logger.warn(
+ "Could not find setup.py for directory %s (tried all "
+ "parent directories)" %
+ orig_location
+ )
return None
return self._get_svn_url_rev(location)[0]
@@ -160,7 +175,9 @@ class Subversion(VersionControl):
f = open(os.path.join(location, self.dirname, 'entries'))
data = f.read()
f.close()
- if data.startswith('8') or data.startswith('9') or data.startswith('10'):
+ if (data.startswith('8')
+ or data.startswith('9')
+ or data.startswith('10')):
data = list(map(str.splitlines, data.split('\n\x0c\n')))
del data[0][0] # get rid of the '8'
url = data[0][3]
@@ -174,9 +191,14 @@ class Subversion(VersionControl):
else:
try:
# subversion >= 1.7
- xml = call_subprocess([self.cmd, 'info', '--xml', location], show_stdout=False)
+ xml = call_subprocess(
+ [self.cmd, 'info', '--xml', location],
+ show_stdout=False,
+ )
url = _svn_info_xml_url_re.search(xml).group(1)
- revs = [int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml)]
+ revs = [
+ int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml)
+ ]
except InstallationError:
url, revs = None, []
@@ -203,7 +225,7 @@ class Subversion(VersionControl):
best_tag = None
for tag, tag_rev in tag_revs:
if (tag_rev > rev and
- (best_match_rev is None or best_match_rev > tag_rev)):
+ (best_match_rev is None or best_match_rev > tag_rev)):
# FIXME: Is best_match > tag_rev really possible?
# or is it a sign something is wacky?
best_match_rev = tag_rev
@@ -232,12 +254,18 @@ class Subversion(VersionControl):
tag_revs = self.get_tag_revs(tag_url)
match = self.find_tag_match(rev, tag_revs)
if match:
- logger.notify('trunk checkout %s seems to be equivalent to tag %s' % match)
+ logger.notify(
+ 'trunk checkout %s seems to be equivalent to tag %s' %
+ match
+ )
repo = '%s/%s' % (tag_url, match)
full_egg_name = '%s-%s' % (egg_project_name, match)
else:
# Don't know what it is
- logger.warn('svn URL does not fit normal structure (tags/branches/trunk): %s' % repo)
+ logger.warn(
+ 'svn URL does not fit normal structure (tags/branches/trunk): '
+ '%s' % repo
+ )
full_egg_name = '%s-dev_r%s' % (egg_project_name, rev)
return 'svn+%s@%s#egg=%s' % (repo, rev, full_egg_name)
diff --git a/pip/wheel.py b/pip/wheel.py
index 1e643cc55..9b5509f99 100644
--- a/pip/wheel.py
+++ b/pip/wheel.py
@@ -20,13 +20,12 @@ from pip.locations import distutils_scheme
from pip.log import logger
from pip import pep425tags
from pip.util import call_subprocess, normalize_path, make_path_relative
-from pip._vendor import pkg_resources
from pip._vendor.distlib.scripts import ScriptMaker
wheel_ext = '.whl'
-def rehash(path, algo='sha256', blocksize=1<<20):
+def rehash(path, algo='sha256', blocksize=1 << 20):
"""Return (hash, length) for path using hashlib.new(algo)"""
h = hashlib.new(algo)
length = 0
@@ -36,11 +35,14 @@ def rehash(path, algo='sha256', blocksize=1<<20):
length += len(block)
h.update(block)
block = f.read(blocksize)
- digest = 'sha256='+urlsafe_b64encode(h.digest()).decode('latin1').rstrip('=')
+ digest = 'sha256=' + urlsafe_b64encode(
+ h.digest()
+ ).decode('latin1').rstrip('=')
return (digest, length)
try:
unicode
+
def binary(s):
if isinstance(s, unicode):
return s.encode('ascii')
@@ -50,15 +52,17 @@ except NameError:
if isinstance(s, str):
return s.encode('ascii')
+
def open_for_csv(name, mode):
if sys.version_info[0] < 3:
nl = {}
bin = 'b'
else:
- nl = { 'newline': '' }
+ nl = {'newline': ''}
bin = ''
return open(name, mode + bin, **nl)
+
def fix_script(path):
"""Replace #!python with #!/path/to/python
Return True if file was changed."""
@@ -85,6 +89,7 @@ def fix_script(path):
dist_info_re = re.compile(r"""^(?P(?P.+?)(-(?P\d.+?))?)
\.dist-info$""", re.VERBOSE)
+
def root_is_purelib(name, wheeldir):
"""
Return True if the extracted wheel in wheeldir should go into purelib.
@@ -129,7 +134,7 @@ def get_entrypoints(filename):
def move_wheel_files(name, req, wheeldir, user=False, home=None, root=None,
- pycompile=True):
+ pycompile=True):
"""Install a wheel"""
scheme = distutils_scheme(name, user=user, home=home, root=root)
@@ -167,7 +172,7 @@ def move_wheel_files(name, req, wheeldir, user=False, home=None, root=None,
changed.add(destfile)
def clobber(source, dest, is_base, fixer=None, filter=None):
- if not os.path.exists(dest): # common for the 'include' path
+ if not os.path.exists(dest): # common for the 'include' path
os.makedirs(dest)
for dir, subdirs, files in os.walk(source):
@@ -180,9 +185,10 @@ def move_wheel_files(name, req, wheeldir, user=False, home=None, root=None,
data_dirs.append(s)
continue
elif (is_base
- and s.endswith('.dist-info')
- # is self.req.project_name case preserving?
- and s.lower().startswith(req.project_name.replace('-', '_').lower())):
+ and s.endswith('.dist-info')
+ # is self.req.project_name case preserving?
+ and s.lower().startswith(
+ req.project_name.replace('-', '_').lower())):
assert not info_dir, 'Multiple .dist-info directories'
info_dir.append(destsubdir)
if not os.path.exists(destsubdir):
@@ -326,16 +332,24 @@ if __name__ == '__main__':
spec = 'easy_install-%s = %s' % (sys.version[:3], easy_install_script)
generated.extend(maker.make(spec))
# Delete any other versioned easy_install entry points
- easy_install_ep = [k for k in console
- if re.match(r'easy_install(-\d\.\d)?$', k)]
+ easy_install_ep = [
+ k for k in console if re.match(r'easy_install(-\d\.\d)?$', k)
+ ]
for k in easy_install_ep:
del console[k]
# Generate the console and GUI entry points specified in the wheel
if len(console) > 0:
- generated.extend(maker.make_multiple(['%s = %s' % kv for kv in console.items()]))
+ generated.extend(
+ maker.make_multiple(['%s = %s' % kv for kv in console.items()])
+ )
if len(gui) > 0:
- generated.extend(maker.make_multiple(['%s = %s' % kv for kv in gui.items()], {'gui': True}))
+ generated.extend(
+ maker.make_multiple(
+ ['%s = %s' % kv for kv in gui.items()],
+ {'gui': True}
+ )
+ )
record = os.path.join(info_dir[0], 'RECORD')
temp_record = os.path.join(info_dir[0], 'RECORD.pip')
@@ -355,6 +369,7 @@ if __name__ == '__main__':
writer.writerow((installed[f], '', ''))
shutil.move(temp_record, record)
+
def _unique(fn):
@functools.wraps(fn)
def unique(*args, **kw):
@@ -365,6 +380,7 @@ def _unique(fn):
yield item
return unique
+
# TODO: this goes somewhere besides the wheel module
@_unique
def uninstallation_paths(dist):
@@ -376,7 +392,7 @@ def uninstallation_paths(dist):
UninstallPathSet.add() takes care of the __pycache__ .pyc.
"""
- from pip.util import FakeFile # circular import
+ from pip.util import FakeFile # circular import
r = csv.reader(FakeFile(dist.get_metadata_lines('RECORD')))
for row in r:
path = os.path.join(dist.location, row[0])
@@ -394,10 +410,11 @@ class Wheel(object):
# TODO: maybe move the install code into this class
wheel_file_re = re.compile(
- r"""^(?P(?P.+?)-(?P\d.*?))
- ((-(?P\d.*?))?-(?P.+?)-(?P.+?)-(?P.+?)
- \.whl|\.dist-info)$""",
- re.VERBOSE)
+ r"""^(?P(?P.+?)-(?P\d.*?))
+ ((-(?P\d.*?))?-(?P.+?)-(?P.+?)-(?P.+?)
+ \.whl|\.dist-info)$""",
+ re.VERBOSE
+ )
def __init__(self, filename):
"""
@@ -405,7 +422,9 @@ class Wheel(object):
"""
wheel_info = self.wheel_file_re.match(filename)
if not wheel_info:
- raise InvalidWheelFilename("%s is not a valid wheel filename." % filename)
+ raise InvalidWheelFilename(
+ "%s is not a valid wheel filename." % filename
+ )
self.filename = filename
self.name = wheel_info.group('name').replace('_', '-')
# we'll assume "_" means "-" due to wheel naming scheme
@@ -416,8 +435,10 @@ class Wheel(object):
self.plats = wheel_info.group('plat').split('.')
# All the tag combinations from this file
- self.file_tags = set((x, y, z) for x in self.pyversions for y
- in self.abis for z in self.plats)
+ self.file_tags = set(
+ (x, y, z) for x in self.pyversions
+ for y in self.abis for z in self.plats
+ )
def support_index_min(self, tags=None):
"""
@@ -426,14 +447,14 @@ class Wheel(object):
and one of the file tags is first in the list, then return 0. Returns
None is the wheel is not supported.
"""
- if tags is None: # for mock
+ if tags is None: # for mock
tags = pep425tags.supported_tags
indexes = [tags.index(c) for c in self.file_tags if c in tags]
return min(indexes) if indexes else None
def supported(self, tags=None):
"""Is this wheel supported on this system?"""
- if tags is None: # for mock
+ if tags is None: # for mock
tags = pep425tags.supported_tags
return bool(set(tags).intersection(self.file_tags))
@@ -441,7 +462,8 @@ class Wheel(object):
class WheelBuilder(object):
"""Build wheels from a RequirementSet."""
- def __init__(self, requirement_set, finder, wheel_dir, build_options=[], global_options=[]):
+ def __init__(self, requirement_set, finder, wheel_dir, build_options=[],
+ global_options=[]):
self.requirement_set = requirement_set
self.finder = finder
self.wheel_dir = normalize_path(wheel_dir)
@@ -453,13 +475,15 @@ class WheelBuilder(object):
base_args = [
sys.executable, '-c',
- "import setuptools;__file__=%r;"\
- "exec(compile(open(__file__).read().replace('\\r\\n', '\\n'), __file__, 'exec'))" % req.setup_py] + \
- list(self.global_options)
+ "import setuptools;__file__=%r;"
+ "exec(compile(open(__file__).read().replace('\\r\\n', '\\n'), "
+ "__file__, 'exec'))" % req.setup_py
+ ] + list(self.global_options)
logger.notify('Running setup.py bdist_wheel for %s' % req.name)
logger.notify('Destination directory: %s' % self.wheel_dir)
- wheel_args = base_args + ['bdist_wheel', '-d', self.wheel_dir] + self.build_options
+ wheel_args = base_args + ['bdist_wheel', '-d', self.wheel_dir] \
+ + self.build_options
try:
call_subprocess(wheel_args, cwd=req.source_dir, show_stdout=False)
return True
@@ -480,7 +504,10 @@ class WheelBuilder(object):
os.makedirs(self.wheel_dir)
#build the wheels
- logger.notify('Building wheels for collected packages: %s' % ', '.join([req.name for req in reqset]))
+ logger.notify(
+ 'Building wheels for collected packages: %s' %
+ ','.join([req.name for req in reqset])
+ )
logger.indent += 2
build_success, build_failure = [], []
for req in reqset:
@@ -495,6 +522,12 @@ class WheelBuilder(object):
#notify sucess/failure
if build_success:
- logger.notify('Successfully built %s' % ' '.join([req.name for req in build_success]))
+ logger.notify(
+ 'Successfully built %s' %
+ ' '.join([req.name for req in build_success])
+ )
if build_failure:
- logger.notify('Failed to build %s' % ' '.join([req.name for req in build_failure]))
+ logger.notify(
+ 'Failed to build %s' %
+ ' '.join([req.name for req in build_failure])
+ )
diff --git a/tests/unit/test_req.py b/tests/unit/test_req.py
index ec4d7822f..092ef8edf 100644
--- a/tests/unit/test_req.py
+++ b/tests/unit/test_req.py
@@ -160,13 +160,10 @@ def test_remote_reqs_parse():
for req in parse_requirements('https://raw.github.com/pypa/pip-test-package/master/tests/req_just_comment.txt'):
pass
-def test_req_file_parse_use_wheel(data, monkeypatch):
+def test_req_file_parse_use_wheel(data):
"""
Test parsing --use-wheel from a req file
"""
- # patch this for travis which has distribute in it's base env for now
- monkeypatch.setattr(pip.wheel.pkg_resources, "get_distribution", lambda x: Distribution(project_name='setuptools', version='0.9'))
-
finder = PackageFinder([], [])
for req in parse_requirements(data.reqfiles.join("supported_options.txt"), finder):
pass
|