1
1
Fork 0
mirror of https://github.com/pypa/pip synced 2023-12-13 21:30:23 +01:00

Merge branch 'master' into sysconfig

This commit is contained in:
Tzu-ping Chung 2021-02-28 06:52:26 +08:00
commit 09513b82da
67 changed files with 876 additions and 690 deletions

View file

@ -23,9 +23,7 @@ repos:
exclude: |
(?x)
^docs/|
^src/pip/_internal/cli|
^src/pip/_internal/commands|
^src/pip/_internal/distributions|
^src/pip/_internal/index|
^src/pip/_internal/models|
^src/pip/_internal/network|

View file

@ -133,11 +133,11 @@ Actual testing
| | +-------+---------------+-----------------+
| | | PyPy3 | | |
| MacOS +----------+-------+---------------+-----------------+
| | | CP3.6 | Azure | Azure |
| | | CP3.6 | Github | Github |
| | +-------+---------------+-----------------+
| | x64 | CP3.7 | Azure | Azure |
| | x64 | CP3.7 | Github | Github |
| | +-------+---------------+-----------------+
| | | CP3.8 | Azure | Azure |
| | | CP3.8 | Github | Github |
| | +-------+---------------+-----------------+
| | | PyPy3 | | |
+-----------+----------+-------+---------------+-----------------+

View file

@ -561,8 +561,12 @@ See the :ref:`pip install Examples<pip install Examples>`.
SSL Certificate Verification
----------------------------
Starting with v1.3, pip provides SSL certificate verification over https, to
prevent man-in-the-middle attacks against PyPI downloads.
Starting with v1.3, pip provides SSL certificate verification over HTTPS, to
prevent man-in-the-middle attacks against PyPI downloads. This does not use
the system certificate store but instead uses a bundled CA certificate
store. The default bundled CA certificate store may be
overridden by using the ``--cert`` option or by using the ``PIP_CERT``,
``REQUESTS_CA_BUNDLE``, or ``CURL_CA_BUNDLE`` environment variables.
.. _`Caching`:
@ -808,7 +812,15 @@ You can install local projects by specifying the project path to pip:
During regular installation, pip will copy the entire project directory to a
temporary location and install from there. The exception is that pip will
exclude .tox and .nox directories present in the top level of the project from
being copied.
being copied. This approach is the cause of several performance and correctness
issues, so it is planned that pip 21.3 will change to install directly from the
local project directory. Depending on the build backend used by the project,
this may generate secondary build artifacts in the project directory, such as
the ``.egg-info`` and ``build`` directories in the case of the setuptools
backend.
To opt in to the future behavior, specify the ``--use-feature=in-tree-build``
option in pip's command line.
.. _`editable-installs`:

View file

@ -1857,9 +1857,11 @@ We plan for the resolver changeover to proceed as follows, using
environments, pip defaults to the old resolver, and the new one is
available using the flag ``--use-feature=2020-resolver``.
* pip 21.0: pip uses new resolver, and the old resolver is no longer
available. Python 2 support is removed per our :ref:`Python 2
Support` policy.
* pip 21.0: pip uses new resolver by default, and the old resolver is
no longer supported. It will be removed after a currently undecided
amount of time, as the removal is dependent on pip's volunteer
maintainers' availability. Python 2 support is removed per our
:ref:`Python 2 Support` policy.
Since this work will not change user-visible behavior described in the
pip documentation, this change is not covered by the :ref:`Deprecation

1
news/6720.doc.rst Normal file
View file

@ -0,0 +1 @@
Improve SSL Certificate Verification docs and ``--cert`` help text.

1
news/8733.bugfix.rst Normal file
View file

@ -0,0 +1 @@
Correctly uninstall script files (from setuptools' ``scripts`` argument), when installed with ``--user``.

4
news/9091.feature.rst Normal file
View file

@ -0,0 +1,4 @@
Add a feature ``--use-feature=in-tree-build`` to build local projects in-place
when installing. This is expected to become the default behavior in pip 21.3;
see `Installing from local packages <https://pip.pypa.io/en/stable/user_guide/#installing-from-local-packages>`_
for more information.

2
news/9300.bugfix.rst Normal file
View file

@ -0,0 +1,2 @@
New resolver: Show relevant entries from user-supplied constraint files in the
error message to improve debuggability.

View file

@ -174,11 +174,14 @@ def vendoring(session):
def pinned_requirements(path):
# type: (Path) -> Iterator[Tuple[str, str]]
for line in path.read_text().splitlines():
one, two = line.split("==", 1)
for line in path.read_text().splitlines(keepends=False):
one, sep, two = line.partition("==")
if not sep:
continue
name = one.strip()
version = two.split("#")[0].strip()
yield name, version
version = two.split("#", 1)[0].strip()
if name and version:
yield name, version
vendor_txt = Path("src/pip/_vendor/vendor.txt")
for name, old_version in pinned_requirements(vendor_txt):

View file

@ -1,6 +1,3 @@
# The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False
import os
import sys
@ -8,14 +5,16 @@ from setuptools import find_packages, setup
def read(rel_path):
# type: (str) -> str
here = os.path.abspath(os.path.dirname(__file__))
# intentionally *not* adding an encoding option to open, See:
# https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690
with open(os.path.join(here, rel_path), 'r') as fp:
with open(os.path.join(here, rel_path)) as fp:
return fp.read()
def get_version(rel_path):
# type: (str) -> str
for line in read(rel_path).splitlines():
if line.startswith('__version__'):
# __version__ = "0.9"

View file

@ -14,17 +14,16 @@ from pip._internal.utils.misc import get_installed_distributions
def autocomplete():
# type: () -> None
"""Entry Point for completion of main and subcommand options.
"""
"""Entry Point for completion of main and subcommand options."""
# Don't complete if user hasn't sourced bash_completion file.
if 'PIP_AUTO_COMPLETE' not in os.environ:
if "PIP_AUTO_COMPLETE" not in os.environ:
return
cwords = os.environ['COMP_WORDS'].split()[1:]
cword = int(os.environ['COMP_CWORD'])
cwords = os.environ["COMP_WORDS"].split()[1:]
cword = int(os.environ["COMP_CWORD"])
try:
current = cwords[cword - 1]
except IndexError:
current = ''
current = ""
parser = create_main_parser()
subcommands = list(commands_dict)
@ -39,13 +38,13 @@ def autocomplete():
# subcommand options
if subcommand_name is not None:
# special case: 'help' subcommand has no options
if subcommand_name == 'help':
if subcommand_name == "help":
sys.exit(1)
# special case: list locally installed dists for show and uninstall
should_list_installed = (
subcommand_name in ['show', 'uninstall'] and
not current.startswith('-')
)
should_list_installed = not current.startswith("-") and subcommand_name in [
"show",
"uninstall",
]
if should_list_installed:
lc = current.lower()
installed = [
@ -67,13 +66,15 @@ def autocomplete():
options.append((opt_str, opt.nargs))
# filter out previously specified options from available options
prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]]
prev_opts = [x.split("=")[0] for x in cwords[1 : cword - 1]]
options = [(x, v) for (x, v) in options if x not in prev_opts]
# filter options by current input
options = [(k, v) for k, v in options if k.startswith(current)]
# get completion type given cwords and available subcommand options
completion_type = get_path_completion_type(
cwords, cword, subcommand.parser.option_list_all,
cwords,
cword,
subcommand.parser.option_list_all,
)
# get completion files and directories if ``completion_type`` is
# ``<file>``, ``<dir>`` or ``<path>``
@ -84,7 +85,7 @@ def autocomplete():
opt_label = option[0]
# append '=' to options which require args
if option[1] and option[0][:2] == "--":
opt_label += '='
opt_label += "="
print(opt_label)
else:
# show main parser options only when necessary
@ -92,19 +93,17 @@ def autocomplete():
opts = [i.option_list for i in parser.option_groups]
opts.append(parser.option_list)
flattened_opts = chain.from_iterable(opts)
if current.startswith('-'):
if current.startswith("-"):
for opt in flattened_opts:
if opt.help != optparse.SUPPRESS_HELP:
subcommands += opt._long_opts + opt._short_opts
else:
# get completion type given cwords and all available options
completion_type = get_path_completion_type(cwords, cword,
flattened_opts)
completion_type = get_path_completion_type(cwords, cword, flattened_opts)
if completion_type:
subcommands = list(auto_complete_paths(current,
completion_type))
subcommands = list(auto_complete_paths(current, completion_type))
print(' '.join([x for x in subcommands if x.startswith(current)]))
print(" ".join([x for x in subcommands if x.startswith(current)]))
sys.exit(1)
@ -117,16 +116,16 @@ def get_path_completion_type(cwords, cword, opts):
:param opts: The available options to check
:return: path completion type (``file``, ``dir``, ``path`` or None)
"""
if cword < 2 or not cwords[cword - 2].startswith('-'):
if cword < 2 or not cwords[cword - 2].startswith("-"):
return None
for opt in opts:
if opt.help == optparse.SUPPRESS_HELP:
continue
for o in str(opt).split('/'):
if cwords[cword - 2].split('=')[0] == o:
for o in str(opt).split("/"):
if cwords[cword - 2].split("=")[0] == o:
if not opt.metavar or any(
x in ('path', 'file', 'dir')
for x in opt.metavar.split('/')):
x in ("path", "file", "dir") for x in opt.metavar.split("/")
):
return opt.metavar
return None
@ -148,15 +147,16 @@ def auto_complete_paths(current, completion_type):
return
filename = os.path.normcase(filename)
# list all files that start with ``filename``
file_list = (x for x in os.listdir(current_path)
if os.path.normcase(x).startswith(filename))
file_list = (
x for x in os.listdir(current_path) if os.path.normcase(x).startswith(filename)
)
for f in file_list:
opt = os.path.join(current_path, f)
comp_file = os.path.normcase(os.path.join(directory, f))
# complete regular files when there is not ``<dir>`` after option
# complete directories when there is ``<file>``, ``<path>`` or
# ``<dir>``after option
if completion_type != 'dir' and os.path.isfile(opt):
if completion_type != "dir" and os.path.isfile(opt):
yield comp_file
elif os.path.isdir(opt):
yield os.path.join(comp_file, '')
yield os.path.join(comp_file, "")

View file

@ -34,7 +34,7 @@ from pip._internal.utils.temp_dir import TempDirectoryTypeRegistry as TempDirReg
from pip._internal.utils.temp_dir import global_tempdir_manager, tempdir_registry
from pip._internal.utils.virtualenv import running_under_virtualenv
__all__ = ['Command']
__all__ = ["Command"]
logger = logging.getLogger(__name__)
@ -51,7 +51,7 @@ class Command(CommandContextMixIn):
self.summary = summary
self.parser = ConfigOptionParser(
usage=self.usage,
prog=f'{get_prog()} {name}',
prog=f"{get_prog()} {name}",
formatter=UpdatingDefaultsHelpFormatter(),
add_help_option=False,
name=name,
@ -62,7 +62,7 @@ class Command(CommandContextMixIn):
self.tempdir_registry = None # type: Optional[TempDirRegistry]
# Commands should add options to this option group
optgroup_name = f'{self.name.capitalize()} Options'
optgroup_name = f"{self.name.capitalize()} Options"
self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)
# Add the general options
@ -86,7 +86,7 @@ class Command(CommandContextMixIn):
"""
# Make sure we do the pip version check if the index_group options
# are present.
assert not hasattr(options, 'no_index')
assert not hasattr(options, "no_index")
def run(self, options, args):
# type: (Values, List[Any]) -> int
@ -131,17 +131,15 @@ class Command(CommandContextMixIn):
# This also affects isolated builds and it should.
if options.no_input:
os.environ['PIP_NO_INPUT'] = '1'
os.environ["PIP_NO_INPUT"] = "1"
if options.exists_action:
os.environ['PIP_EXISTS_ACTION'] = ' '.join(options.exists_action)
os.environ["PIP_EXISTS_ACTION"] = " ".join(options.exists_action)
if options.require_venv and not self.ignore_require_venv:
# If a venv is required check if it can really be found
if not running_under_virtualenv():
logger.critical(
'Could not find an activated virtualenv (required).'
)
logger.critical("Could not find an activated virtualenv (required).")
sys.exit(VIRTUALENV_NOT_FOUND)
if options.cache_dir:
@ -171,7 +169,7 @@ class Command(CommandContextMixIn):
issue=8333,
)
if '2020-resolver' in options.features_enabled:
if "2020-resolver" in options.features_enabled:
logger.warning(
"--use-feature=2020-resolver no longer has any effect, "
"since it is now the default dependency resolver in pip. "
@ -184,35 +182,39 @@ class Command(CommandContextMixIn):
return status
except PreviousBuildDirError as exc:
logger.critical(str(exc))
logger.debug('Exception information:', exc_info=True)
logger.debug("Exception information:", exc_info=True)
return PREVIOUS_BUILD_DIR_ERROR
except (InstallationError, UninstallationError, BadCommand,
NetworkConnectionError) as exc:
except (
InstallationError,
UninstallationError,
BadCommand,
NetworkConnectionError,
) as exc:
logger.critical(str(exc))
logger.debug('Exception information:', exc_info=True)
logger.debug("Exception information:", exc_info=True)
return ERROR
except CommandError as exc:
logger.critical('%s', exc)
logger.debug('Exception information:', exc_info=True)
logger.critical("%s", exc)
logger.debug("Exception information:", exc_info=True)
return ERROR
except BrokenStdoutLoggingError:
# Bypass our logger and write any remaining messages to stderr
# because stdout no longer works.
print('ERROR: Pipe to stdout was broken', file=sys.stderr)
print("ERROR: Pipe to stdout was broken", file=sys.stderr)
if level_number <= logging.DEBUG:
traceback.print_exc(file=sys.stderr)
return ERROR
except KeyboardInterrupt:
logger.critical('Operation cancelled by user')
logger.debug('Exception information:', exc_info=True)
logger.critical("Operation cancelled by user")
logger.debug("Exception information:", exc_info=True)
return ERROR
except BaseException:
logger.critical('Exception:', exc_info=True)
logger.critical("Exception:", exc_info=True)
return UNKNOWN_ERROR
finally:

File diff suppressed because it is too large Load diff

View file

@ -1,7 +1,7 @@
from contextlib import ExitStack, contextmanager
from typing import ContextManager, Iterator, TypeVar
_T = TypeVar('_T', covariant=True)
_T = TypeVar("_T", covariant=True)
class CommandContextMixIn:

View file

@ -41,6 +41,7 @@ logger = logging.getLogger(__name__)
# call to main. As it is not safe to do any processing after calling
# main, this should not be an issue in practice.
def main(args=None):
# type: (Optional[List[str]]) -> int
if args is None:
@ -61,7 +62,7 @@ def main(args=None):
# Needed for locale.getpreferredencoding(False) to work
# in pip._internal.utils.encoding.auto_decode
try:
locale.setlocale(locale.LC_ALL, '')
locale.setlocale(locale.LC_ALL, "")
except locale.Error as e:
# setlocale can apparently crash if locale are uninitialized
logger.debug("Ignoring error %s when setting locale", e)

View file

@ -16,14 +16,13 @@ __all__ = ["create_main_parser", "parse_command"]
def create_main_parser():
# type: () -> ConfigOptionParser
"""Creates and returns the main parser for pip's CLI
"""
"""Creates and returns the main parser for pip's CLI"""
parser = ConfigOptionParser(
usage='\n%prog <command> [options]',
usage="\n%prog <command> [options]",
add_help_option=False,
formatter=UpdatingDefaultsHelpFormatter(),
name='global',
name="global",
prog=get_prog(),
)
parser.disable_interspersed_args()
@ -38,11 +37,11 @@ def create_main_parser():
parser.main = True # type: ignore
# create command listing for description
description = [''] + [
f'{name:27} {command_info.summary}'
description = [""] + [
f"{name:27} {command_info.summary}"
for name, command_info in commands_dict.items()
]
parser.description = '\n'.join(description)
parser.description = "\n".join(description)
return parser
@ -67,7 +66,7 @@ def parse_command(args):
sys.exit()
# pip || pip help -> print_help()
if not args_else or (args_else[0] == 'help' and len(args_else) == 1):
if not args_else or (args_else[0] == "help" and len(args_else) == 1):
parser.print_help()
sys.exit()
@ -81,7 +80,7 @@ def parse_command(args):
if guess:
msg.append(f'maybe you meant "{guess}"')
raise CommandError(' - '.join(msg))
raise CommandError(" - ".join(msg))
# all the args without the subcommand
cmd_args = args[:]

View file

@ -1,15 +1,12 @@
"""Base option parser setup"""
# The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False
import logging
import optparse
import shutil
import sys
import textwrap
from contextlib import suppress
from typing import Any
from typing import Any, Dict, Iterator, List, Tuple
from pip._internal.cli.status_codes import UNKNOWN_ERROR
from pip._internal.configuration import Configuration, ConfigurationError
@ -22,16 +19,19 @@ class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
"""A prettier/less verbose help formatter for optparse."""
def __init__(self, *args, **kwargs):
# type: (*Any, **Any) -> None
# help position must be aligned with __init__.parseopts.description
kwargs['max_help_position'] = 30
kwargs['indent_increment'] = 1
kwargs['width'] = shutil.get_terminal_size()[0] - 2
kwargs["max_help_position"] = 30
kwargs["indent_increment"] = 1
kwargs["width"] = shutil.get_terminal_size()[0] - 2
super().__init__(*args, **kwargs)
def format_option_strings(self, option):
# type: (optparse.Option) -> str
return self._format_option_strings(option)
def _format_option_strings(self, option, mvarfmt=' <{}>', optsep=', '):
def _format_option_strings(self, option, mvarfmt=" <{}>", optsep=", "):
# type: (optparse.Option, str, str) -> str
"""
Return a comma-separated list of option strings and metavars.
@ -49,52 +49,57 @@ class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
opts.insert(1, optsep)
if option.takes_value():
assert option.dest is not None
metavar = option.metavar or option.dest.lower()
opts.append(mvarfmt.format(metavar.lower()))
return ''.join(opts)
return "".join(opts)
def format_heading(self, heading):
if heading == 'Options':
return ''
return heading + ':\n'
# type: (str) -> str
if heading == "Options":
return ""
return heading + ":\n"
def format_usage(self, usage):
# type: (str) -> str
"""
Ensure there is only one newline between usage and the first heading
if there is no description.
"""
msg = '\nUsage: {}\n'.format(
self.indent_lines(textwrap.dedent(usage), " "))
msg = "\nUsage: {}\n".format(self.indent_lines(textwrap.dedent(usage), " "))
return msg
def format_description(self, description):
# type: (str) -> str
# leave full control over description to us
if description:
if hasattr(self.parser, 'main'):
label = 'Commands'
if hasattr(self.parser, "main"):
label = "Commands"
else:
label = 'Description'
label = "Description"
# some doc strings have initial newlines, some don't
description = description.lstrip('\n')
description = description.lstrip("\n")
# some doc strings have final newlines and spaces, some don't
description = description.rstrip()
# dedent, then reindent
description = self.indent_lines(textwrap.dedent(description), " ")
description = f'{label}:\n{description}\n'
description = f"{label}:\n{description}\n"
return description
else:
return ''
return ""
def format_epilog(self, epilog):
# type: (str) -> str
# leave full control over epilog to us
if epilog:
return epilog
else:
return ''
return ""
def indent_lines(self, text, indent):
new_lines = [indent + line for line in text.split('\n')]
# type: (str, str) -> str
new_lines = [indent + line for line in text.split("\n")]
return "\n".join(new_lines)
@ -108,13 +113,16 @@ class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
"""
def expand_default(self, option):
# type: (optparse.Option) -> str
default_values = None
if self.parser is not None:
assert isinstance(self.parser, ConfigOptionParser)
self.parser._update_defaults(self.parser.defaults)
assert option.dest is not None
default_values = self.parser.defaults.get(option.dest)
help_text = super().expand_default(option)
if default_values and option.metavar == 'URL':
if default_values and option.metavar == "URL":
if isinstance(default_values, str):
default_values = [default_values]
@ -123,15 +131,14 @@ class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
default_values = []
for val in default_values:
help_text = help_text.replace(
val, redact_auth_from_url(val))
help_text = help_text.replace(val, redact_auth_from_url(val))
return help_text
class CustomOptionParser(optparse.OptionParser):
def insert_option_group(self, idx, *args, **kwargs):
# type: (int, Any, Any) -> optparse.OptionGroup
"""Insert an OptionGroup at a given position."""
group = self.add_option_group(*args, **kwargs)
@ -142,6 +149,7 @@ class CustomOptionParser(optparse.OptionParser):
@property
def option_list_all(self):
# type: () -> List[optparse.Option]
"""Get a list of all options, including those in option groups."""
res = self.option_list[:]
for i in self.option_groups:
@ -169,6 +177,7 @@ class ConfigOptionParser(CustomOptionParser):
super().__init__(*args, **kwargs)
def check_default(self, option, key, val):
# type: (optparse.Option, str, Any) -> Any
try:
return option.check_value(key, val)
except optparse.OptionValueError as exc:
@ -176,17 +185,20 @@ class ConfigOptionParser(CustomOptionParser):
sys.exit(3)
def _get_ordered_configuration_items(self):
# type: () -> Iterator[Tuple[str, Any]]
# Configuration gives keys in an unordered manner. Order them.
override_order = ["global", self.name, ":env:"]
# Pool the options into different groups
section_items = {name: [] for name in override_order}
section_items = {
name: [] for name in override_order
} # type: Dict[str, List[Tuple[str, Any]]]
for section_key, val in self.config.items():
# ignore empty values
if not val:
logger.debug(
"Ignoring configuration key '%s' as it's value is empty.",
section_key
section_key,
)
continue
@ -200,6 +212,7 @@ class ConfigOptionParser(CustomOptionParser):
yield key, val
def _update_defaults(self, defaults):
# type: (Dict[str, Any]) -> Dict[str, Any]
"""Updates the given defaults with values from the config files and
the environ. Does a little special handling for certain types of
options (lists)."""
@ -210,7 +223,7 @@ class ConfigOptionParser(CustomOptionParser):
# Then set the options with those values
for key, val in self._get_ordered_configuration_items():
# '--' because configuration supports only long names
option = self.get_option('--' + key)
option = self.get_option("--" + key)
# Ignore options not present in this parser. E.g. non-globals put
# in [global] by users that want them to apply to all applicable
@ -218,31 +231,34 @@ class ConfigOptionParser(CustomOptionParser):
if option is None:
continue
if option.action in ('store_true', 'store_false'):
assert option.dest is not None
if option.action in ("store_true", "store_false"):
try:
val = strtobool(val)
except ValueError:
self.error(
'{} is not a valid value for {} option, ' # noqa
'please specify a boolean value like yes/no, '
'true/false or 1/0 instead.'.format(val, key)
"{} is not a valid value for {} option, " # noqa
"please specify a boolean value like yes/no, "
"true/false or 1/0 instead.".format(val, key)
)
elif option.action == 'count':
elif option.action == "count":
with suppress(ValueError):
val = strtobool(val)
with suppress(ValueError):
val = int(val)
if not isinstance(val, int) or val < 0:
self.error(
'{} is not a valid value for {} option, ' # noqa
'please instead specify either a non-negative integer '
'or a boolean value like yes/no or false/true '
'which is equivalent to 1/0.'.format(val, key)
"{} is not a valid value for {} option, " # noqa
"please instead specify either a non-negative integer "
"or a boolean value like yes/no or false/true "
"which is equivalent to 1/0.".format(val, key)
)
elif option.action == 'append':
elif option.action == "append":
val = val.split()
val = [self.check_default(option, key, v) for v in val]
elif option.action == 'callback':
elif option.action == "callback":
assert option.callback is not None
late_eval.add(option.dest)
opt_str = option.get_opt_string()
val = option.convert_value(opt_str, val)
@ -261,6 +277,7 @@ class ConfigOptionParser(CustomOptionParser):
return defaults
def get_default_values(self):
# type: () -> optparse.Values
"""Overriding to make updating the defaults after instantiation of
the option parser possible, _update_defaults() does the dirty work."""
if not self.process_default_values:
@ -275,6 +292,7 @@ class ConfigOptionParser(CustomOptionParser):
defaults = self._update_defaults(self.defaults.copy()) # ours
for option in self._get_all_options():
assert option.dest is not None
default = defaults.get(option.dest)
if isinstance(default, str):
opt_str = option.get_opt_string()
@ -282,5 +300,6 @@ class ConfigOptionParser(CustomOptionParser):
return optparse.Values(defaults)
def error(self, msg):
# type: (str) -> None
self.print_usage(sys.stderr)
self.exit(UNKNOWN_ERROR, f"{msg}\n")

View file

@ -108,7 +108,6 @@ class InterruptibleMixin:
class SilentBar(Bar):
def update(self):
# type: () -> None
pass
@ -123,14 +122,11 @@ class BlueEmojiBar(IncrementalBar):
class DownloadProgressMixin:
def __init__(self, *args, **kwargs):
# type: (List[Any], Dict[Any, Any]) -> None
# https://github.com/python/mypy/issues/5887
super().__init__(*args, **kwargs) # type: ignore
self.message = (" " * (
get_indentation() + 2
)) + self.message # type: str
self.message = (" " * (get_indentation() + 2)) + self.message # type: str
@property
def downloaded(self):
@ -162,7 +158,6 @@ class DownloadProgressMixin:
class WindowsMixin:
def __init__(self, *args, **kwargs):
# type: (List[Any], Dict[Any, Any]) -> None
# The Windows terminal does not support the hide/show cursor ANSI codes
@ -192,16 +187,14 @@ class WindowsMixin:
self.file.flush = lambda: self.file.wrapped.flush()
class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin,
DownloadProgressMixin):
class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin, DownloadProgressMixin):
file = sys.stdout
message = "%(percent)d%%"
suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s"
class DefaultDownloadProgressBar(BaseDownloadProgressBar,
_BaseBar):
class DefaultDownloadProgressBar(BaseDownloadProgressBar, _BaseBar):
pass
@ -209,23 +202,21 @@ class DownloadSilentBar(BaseDownloadProgressBar, SilentBar):
pass
class DownloadBar(BaseDownloadProgressBar,
Bar):
class DownloadBar(BaseDownloadProgressBar, Bar):
pass
class DownloadFillingCirclesBar(BaseDownloadProgressBar,
FillingCirclesBar):
class DownloadFillingCirclesBar(BaseDownloadProgressBar, FillingCirclesBar):
pass
class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar,
BlueEmojiBar):
class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar, BlueEmojiBar):
pass
class DownloadProgressSpinner(WindowsMixin, InterruptibleMixin,
DownloadProgressMixin, Spinner):
class DownloadProgressSpinner(
WindowsMixin, InterruptibleMixin, DownloadProgressMixin, Spinner
):
file = sys.stdout
suffix = "%(downloaded)s %(download_speed)s"
@ -241,13 +232,15 @@ class DownloadProgressSpinner(WindowsMixin, InterruptibleMixin,
message = self.message % self
phase = self.next_phase()
suffix = self.suffix % self
line = ''.join([
message,
" " if message else "",
phase,
" " if suffix else "",
suffix,
])
line = "".join(
[
message,
" " if message else "",
phase,
" " if suffix else "",
suffix,
]
)
self.writeln(line)
@ -257,7 +250,7 @@ BAR_TYPES = {
"on": (DefaultDownloadProgressBar, DownloadProgressSpinner),
"ascii": (DownloadBar, DownloadProgressSpinner),
"pretty": (DownloadFillingCirclesBar, DownloadProgressSpinner),
"emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner)
"emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner),
}

View file

@ -47,6 +47,7 @@ class SessionCommandMixin(CommandContextMixIn):
"""
A class mixin for command classes needing _build_session().
"""
def __init__(self):
# type: () -> None
super().__init__()
@ -83,8 +84,7 @@ class SessionCommandMixin(CommandContextMixIn):
assert not options.cache_dir or os.path.isabs(options.cache_dir)
session = PipSession(
cache=(
os.path.join(options.cache_dir, "http")
if options.cache_dir else None
os.path.join(options.cache_dir, "http") if options.cache_dir else None
),
retries=retries if retries is not None else options.retries,
trusted_hosts=options.trusted_hosts,
@ -101,9 +101,7 @@ class SessionCommandMixin(CommandContextMixIn):
# Handle timeouts
if options.timeout or timeout:
session.timeout = (
timeout if timeout is not None else options.timeout
)
session.timeout = timeout if timeout is not None else options.timeout
# Handle configured proxies
if options.proxy:
@ -134,16 +132,14 @@ class IndexGroupCommand(Command, SessionCommandMixin):
This overrides the default behavior of not doing the check.
"""
# Make sure the index_group options are present.
assert hasattr(options, 'no_index')
assert hasattr(options, "no_index")
if options.disable_pip_version_check or options.no_index:
return
# Otherwise, check if we're using the latest version of pip available.
session = self._build_session(
options,
retries=0,
timeout=min(5, options.timeout)
options, retries=0, timeout=min(5, options.timeout)
)
with session:
pip_self_version_check(session, options)
@ -161,6 +157,7 @@ def with_cleanup(func):
"""Decorator for common logic related to managing temporary
directories.
"""
def configure_tempdir_registry(registry):
# type: (TempDirectoryTypeRegistry) -> None
for t in KEEPABLE_TEMPDIR_TYPES:
@ -185,7 +182,6 @@ def with_cleanup(func):
class RequirementCommand(IndexGroupCommand):
def __init__(self, *args, **kw):
# type: (Any, Any) -> None
super().__init__(*args, **kw)
@ -204,13 +200,13 @@ class RequirementCommand(IndexGroupCommand):
@classmethod
def make_requirement_preparer(
cls,
temp_build_dir, # type: TempDirectory
options, # type: Values
req_tracker, # type: RequirementTracker
session, # type: PipSession
finder, # type: PackageFinder
use_user_site, # type: bool
download_dir=None, # type: str
temp_build_dir, # type: TempDirectory
options, # type: Values
req_tracker, # type: RequirementTracker
session, # type: PipSession
finder, # type: PackageFinder
use_user_site, # type: bool
download_dir=None, # type: str
):
# type: (...) -> RequirementPreparer
"""
@ -221,20 +217,20 @@ class RequirementCommand(IndexGroupCommand):
resolver_variant = cls.determine_resolver_variant(options)
if resolver_variant == "2020-resolver":
lazy_wheel = 'fast-deps' in options.features_enabled
lazy_wheel = "fast-deps" in options.features_enabled
if lazy_wheel:
logger.warning(
'pip is using lazily downloaded wheels using HTTP '
'range requests to obtain dependency information. '
'This experimental feature is enabled through '
'--use-feature=fast-deps and it is not ready for '
'production.'
"pip is using lazily downloaded wheels using HTTP "
"range requests to obtain dependency information. "
"This experimental feature is enabled through "
"--use-feature=fast-deps and it is not ready for "
"production."
)
else:
lazy_wheel = False
if 'fast-deps' in options.features_enabled:
if "fast-deps" in options.features_enabled:
logger.warning(
'fast-deps has no effect when used with the legacy resolver.'
"fast-deps has no effect when used with the legacy resolver."
)
return RequirementPreparer(
@ -249,22 +245,23 @@ class RequirementCommand(IndexGroupCommand):
require_hashes=options.require_hashes,
use_user_site=use_user_site,
lazy_wheel=lazy_wheel,
in_tree_build="in-tree-build" in options.features_enabled,
)
@classmethod
def make_resolver(
cls,
preparer, # type: RequirementPreparer
finder, # type: PackageFinder
options, # type: Values
wheel_cache=None, # type: Optional[WheelCache]
use_user_site=False, # type: bool
ignore_installed=True, # type: bool
ignore_requires_python=False, # type: bool
force_reinstall=False, # type: bool
preparer, # type: RequirementPreparer
finder, # type: PackageFinder
options, # type: Values
wheel_cache=None, # type: Optional[WheelCache]
use_user_site=False, # type: bool
ignore_installed=True, # type: bool
ignore_requires_python=False, # type: bool
force_reinstall=False, # type: bool
upgrade_strategy="to-satisfy-only", # type: str
use_pep517=None, # type: Optional[bool]
py_version_info=None, # type: Optional[Tuple[int, ...]]
use_pep517=None, # type: Optional[bool]
py_version_info=None, # type: Optional[Tuple[int, ...]]
):
# type: (...) -> BaseResolver
"""
@ -296,6 +293,7 @@ class RequirementCommand(IndexGroupCommand):
py_version_info=py_version_info,
)
import pip._internal.resolution.legacy.resolver
return pip._internal.resolution.legacy.resolver.Resolver(
preparer=preparer,
finder=finder,
@ -312,10 +310,10 @@ class RequirementCommand(IndexGroupCommand):
def get_requirements(
self,
args, # type: List[str]
options, # type: Values
finder, # type: PackageFinder
session, # type: PipSession
args, # type: List[str]
options, # type: Values
finder, # type: PackageFinder
session, # type: PipSession
):
# type: (...) -> List[InstallRequirement]
"""
@ -324,9 +322,12 @@ class RequirementCommand(IndexGroupCommand):
requirements = [] # type: List[InstallRequirement]
for filename in options.constraints:
for parsed_req in parse_requirements(
filename,
constraint=True, finder=finder, options=options,
session=session):
filename,
constraint=True,
finder=finder,
options=options,
session=session,
):
req_to_add = install_req_from_parsed_requirement(
parsed_req,
isolated=options.isolated_mode,
@ -336,7 +337,9 @@ class RequirementCommand(IndexGroupCommand):
for req in args:
req_to_add = install_req_from_line(
req, None, isolated=options.isolated_mode,
req,
None,
isolated=options.isolated_mode,
use_pep517=options.use_pep517,
user_supplied=True,
)
@ -354,8 +357,8 @@ class RequirementCommand(IndexGroupCommand):
# NOTE: options.require_hashes may be set if --require-hashes is True
for filename in options.requirements:
for parsed_req in parse_requirements(
filename,
finder=finder, options=options, session=session):
filename, finder=finder, options=options, session=session
):
req_to_add = install_req_from_parsed_requirement(
parsed_req,
isolated=options.isolated_mode,
@ -369,16 +372,19 @@ class RequirementCommand(IndexGroupCommand):
options.require_hashes = True
if not (args or options.editables or options.requirements):
opts = {'name': self.name}
opts = {"name": self.name}
if options.find_links:
raise CommandError(
'You must give at least one requirement to {name} '
"You must give at least one requirement to {name} "
'(maybe you meant "pip {name} {links}"?)'.format(
**dict(opts, links=' '.join(options.find_links))))
**dict(opts, links=" ".join(options.find_links))
)
)
else:
raise CommandError(
'You must give at least one requirement to {name} '
'(see "pip help {name}")'.format(**opts))
"You must give at least one requirement to {name} "
'(see "pip help {name}")'.format(**opts)
)
return requirements
@ -396,9 +402,9 @@ class RequirementCommand(IndexGroupCommand):
def _build_package_finder(
self,
options, # type: Values
session, # type: PipSession
target_python=None, # type: Optional[TargetPython]
options, # type: Values
session, # type: PipSession
target_python=None, # type: Optional[TargetPython]
ignore_requires_python=None, # type: Optional[bool]
):
# type: (...) -> PackageFinder

View file

@ -24,9 +24,14 @@ class SpinnerInterface:
class InteractiveSpinner(SpinnerInterface):
def __init__(self, message, file=None, spin_chars="-\\|/",
# Empirically, 8 updates/second looks nice
min_update_interval_seconds=0.125):
def __init__(
self,
message,
file=None,
spin_chars="-\\|/",
# Empirically, 8 updates/second looks nice
min_update_interval_seconds=0.125,
):
# type: (str, IO[str], str, float) -> None
self._message = message
if file is None:

View file

@ -22,10 +22,10 @@ from pip._internal.utils.misc import write_output
if TYPE_CHECKING:
from typing import TypedDict
TransformedHit = TypedDict(
'TransformedHit',
{'name': str, 'summary': str, 'versions': List[str]},
)
class TransformedHit(TypedDict):
name: str
summary: str
versions: List[str]
logger = logging.getLogger(__name__)

View file

@ -6,8 +6,7 @@ from pip._internal.req.req_install import InstallRequirement
def make_distribution_for_install_requirement(install_req):
# type: (InstallRequirement) -> AbstractDistribution
"""Returns a Distribution for the given InstallRequirement
"""
"""Returns a Distribution for the given InstallRequirement"""
# Editable requirements will always be source distributions. They use the
# legacy logic until we create a modern standard for them.
if install_req.editable:

View file

@ -22,6 +22,7 @@ class AbstractDistribution(metaclass=abc.ABCMeta):
- we must be able to create a Distribution object exposing the
above metadata.
"""
def __init__(self, req):
# type: (InstallRequirement) -> None
super().__init__()

View file

@ -46,10 +46,10 @@ class SourceDistribution(AbstractDistribution):
error_message = format_string.format(
requirement=self.req,
conflicting_with=conflicting_with,
description=', '.join(
f'{installed} is incompatible with {wanted}'
description=", ".join(
f"{installed} is incompatible with {wanted}"
for installed, wanted in sorted(conflicting)
)
),
)
raise InstallationError(error_message)
@ -60,15 +60,13 @@ class SourceDistribution(AbstractDistribution):
self.req.build_env = BuildEnvironment()
self.req.build_env.install_requirements(
finder, pyproject_requires, 'overlay',
"Installing build dependencies"
finder, pyproject_requires, "overlay", "Installing build dependencies"
)
conflicting, missing = self.req.build_env.check_requirements(
self.req.requirements_to_check
)
if conflicting:
_raise_conflicts("PEP 517/518 supported requirements",
conflicting)
_raise_conflicts("PEP 517/518 supported requirements", conflicting)
if missing:
logger.warning(
"Missing build requirements in pyproject.toml for %s.",
@ -77,15 +75,13 @@ class SourceDistribution(AbstractDistribution):
logger.warning(
"The project does not specify a build backend, and "
"pip cannot fall back to setuptools without %s.",
" and ".join(map(repr, sorted(missing)))
" and ".join(map(repr, sorted(missing))),
)
# Install any extra build dependencies that the backend requests.
# This must be done in a second pass, as the pyproject.toml
# dependencies must be installed before we can call the backend.
with self.req.build_env:
runner = runner_with_spinner_message(
"Getting requirements to build wheel"
)
runner = runner_with_spinner_message("Getting requirements to build wheel")
backend = self.req.pep517_backend
assert backend is not None
with backend.subprocess_runner(runner):
@ -95,6 +91,5 @@ class SourceDistribution(AbstractDistribution):
if conflicting:
_raise_conflicts("the backend dependencies", conflicting)
self.req.build_env.install_requirements(
finder, missing, 'normal',
"Installing backend dependencies"
finder, missing, "normal", "Installing backend dependencies"
)

View file

@ -8,7 +8,6 @@ from pip._internal.models.scheme import SCHEME_KEYS, Scheme
from . import _distutils, _sysconfig
from .base import (
USER_CACHE_DIR,
get_bin_user,
get_major_minor_version,
get_src_prefix,
site_packages,
@ -119,6 +118,11 @@ def get_bin_prefix():
return old
def get_bin_user():
# type: () -> str
return _sysconfig.get_scheme("", user=True).scripts
def get_purelib():
# type: () -> str
"""Return the default pure-Python lib location."""

View file

@ -137,7 +137,7 @@ def get_scheme(
paths["include"] = os.path.join(base, "include", "site", python_xy)
# Special user scripts path on Windows for compatibility to distutils.
# See ``distutils.commands.install.INSTALL_SCHEMES["nt_user"]["scritps"]``.
# See ``distutils.commands.install.INSTALL_SCHEMES["nt_user"]["scripts"]``.
if scheme_name == "nt_user":
base = variables.get("userbase", sys.prefix)
python_xy = f"Python{sys.version_info.major}{sys.version_info.minor}"

View file

@ -4,9 +4,7 @@ import sys
import sysconfig
import typing
from pip._internal.exceptions import UserInstallationInvalid
from pip._internal.utils import appdirs
from pip._internal.utils.compat import WINDOWS
from pip._internal.utils.virtualenv import running_under_virtualenv
# Application Directories
@ -48,21 +46,3 @@ try:
user_site = site.getusersitepackages() # type: typing.Optional[str]
except AttributeError:
user_site = site.USER_SITE
def get_bin_user():
# type: () -> str
"""Get the user-site scripts directory.
Pip puts the scripts directory in site-packages, not under userbase.
I'm honestly not sure if this is a bug (because ``get_scheme()`` puts it
correctly under userbase), but we need to keep backwards compatibility.
"""
if user_site is None:
raise UserInstallationInvalid()
if not WINDOWS:
return os.path.join(user_site, "bin")
# Special case for buildout, which uses 'bin' on Windows too?
if not os.path.exists(os.path.join(sys.prefix, "Scripts")):
os.path.join(user_site, "bin")
return os.path.join(user_site, "Scripts")

View file

@ -37,7 +37,7 @@ except Exception as exc:
def get_keyring_auth(url, username):
# type: (str, str) -> Optional[AuthInfo]
# type: (Optional[str], Optional[str]) -> Optional[AuthInfo]
"""Return the tuple auth for a given url from keyring."""
global keyring
if not url or not keyring:

View file

@ -2,8 +2,15 @@
network request configuration and behavior.
"""
# The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False
# When mypy runs on Windows the call to distro.linux_distribution() is skipped
# resulting in the failure:
#
# error: unused 'type: ignore' comment
#
# If the upstream module adds typing, this comment should be removed. See
# https://github.com/nir0s/distro/pull/269
#
# mypy: warn-unused-ignores=False
import email.utils
import ipaddress
@ -15,13 +22,14 @@ import platform
import sys
import urllib.parse
import warnings
from typing import Any, Iterator, List, Optional, Sequence, Tuple, Union
from typing import Any, Dict, Iterator, List, Mapping, Optional, Sequence, Tuple, Union
from pip._vendor import requests, urllib3
from pip._vendor.cachecontrol import CacheControlAdapter
from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter
from pip._vendor.requests.models import Response
from pip._vendor.requests.models import PreparedRequest, Response
from pip._vendor.requests.structures import CaseInsensitiveDict
from pip._vendor.urllib3.connectionpool import ConnectionPool
from pip._vendor.urllib3.exceptions import InsecureRequestWarning
from pip import __version__
@ -89,6 +97,7 @@ def looks_like_ci():
def user_agent():
# type: () -> str
"""
Return a string representing the user agent.
"""
@ -98,15 +107,14 @@ def user_agent():
"implementation": {
"name": platform.python_implementation(),
},
}
} # type: Dict[str, Any]
if data["implementation"]["name"] == 'CPython':
data["implementation"]["version"] = platform.python_version()
elif data["implementation"]["name"] == 'PyPy':
if sys.pypy_version_info.releaselevel == 'final':
pypy_version_info = sys.pypy_version_info[:3]
else:
pypy_version_info = sys.pypy_version_info
pypy_version_info = sys.pypy_version_info # type: ignore
if pypy_version_info.releaselevel == 'final':
pypy_version_info = pypy_version_info[:3]
data["implementation"]["version"] = ".".join(
[str(x) for x in pypy_version_info]
)
@ -119,9 +127,12 @@ def user_agent():
if sys.platform.startswith("linux"):
from pip._vendor import distro
# https://github.com/nir0s/distro/pull/269
linux_distribution = distro.linux_distribution() # type: ignore
distro_infos = dict(filter(
lambda x: x[1],
zip(["name", "version", "id"], distro.linux_distribution()),
zip(["name", "version", "id"], linux_distribution),
))
libc = dict(filter(
lambda x: x[1],
@ -170,8 +181,16 @@ def user_agent():
class LocalFSAdapter(BaseAdapter):
def send(self, request, stream=None, timeout=None, verify=None, cert=None,
proxies=None):
def send(
self,
request, # type: PreparedRequest
stream=False, # type: bool
timeout=None, # type: Optional[Union[float, Tuple[float, float]]]
verify=True, # type: Union[bool, str]
cert=None, # type: Optional[Union[str, Tuple[str, str]]]
proxies=None, # type:Optional[Mapping[str, str]]
):
# type: (...) -> Response
pathname = url_to_path(request.url)
resp = Response()
@ -198,18 +217,33 @@ class LocalFSAdapter(BaseAdapter):
return resp
def close(self):
# type: () -> None
pass
class InsecureHTTPAdapter(HTTPAdapter):
def cert_verify(self, conn, url, verify, cert):
def cert_verify(
self,
conn, # type: ConnectionPool
url, # type: str
verify, # type: Union[bool, str]
cert, # type: Optional[Union[str, Tuple[str, str]]]
):
# type: (...) -> None
super().cert_verify(conn=conn, url=url, verify=False, cert=cert)
class InsecureCacheControlAdapter(CacheControlAdapter):
def cert_verify(self, conn, url, verify, cert):
def cert_verify(
self,
conn, # type: ConnectionPool
url, # type: str
verify, # type: Union[bool, str]
cert, # type: Optional[Union[str, Tuple[str, str]]]
):
# type: (...) -> None
super().cert_verify(conn=conn, url=url, verify=False, cert=cert)
@ -407,6 +441,7 @@ class PipSession(requests.Session):
return False
def request(self, method, url, *args, **kwargs):
# type: (str, str, *Any, **Any) -> Response
# Allow setting a default timeout on a session
kwargs.setdefault("timeout", self.timeout)

View file

@ -35,6 +35,7 @@ from pip._internal.network.lazy_wheel import (
from pip._internal.network.session import PipSession
from pip._internal.req.req_install import InstallRequirement
from pip._internal.req.req_tracker import RequirementTracker
from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.filesystem import copy2_fixed
from pip._internal.utils.hashes import Hashes, MissingHashes
from pip._internal.utils.logging import indent_log
@ -207,8 +208,23 @@ def unpack_url(
unpack_vcs_link(link, location)
return None
# If it's a url to a local directory
# Once out-of-tree-builds are no longer supported, could potentially
# replace the below condition with `assert not link.is_existing_dir`
# - unpack_url does not need to be called for in-tree-builds.
#
# As further cleanup, _copy_source_tree and accompanying tests can
# be removed.
if link.is_existing_dir():
deprecated(
"A future pip version will change local packages to be built "
"in-place without first copying to a temporary directory. "
"We recommend you use --use-feature=in-tree-build to test "
"your packages with this new behavior before it becomes the "
"default.\n",
replacement=None,
gone_in="21.3",
issue=7555
)
if os.path.isdir(location):
rmtree(location)
_copy_source_tree(link.file_path, location)
@ -278,6 +294,7 @@ class RequirementPreparer:
require_hashes, # type: bool
use_user_site, # type: bool
lazy_wheel, # type: bool
in_tree_build, # type: bool
):
# type: (...) -> None
super().__init__()
@ -306,6 +323,9 @@ class RequirementPreparer:
# Should wheels be downloaded lazily?
self.use_lazy_wheel = lazy_wheel
# Should in-tree builds be used for local paths?
self.in_tree_build = in_tree_build
# Memoized downloaded files, as mapping of url: (path, mime type)
self._downloaded = {} # type: Dict[str, Tuple[str, str]]
@ -339,6 +359,11 @@ class RequirementPreparer:
# directory.
return
assert req.source_dir is None
if req.link.is_existing_dir() and self.in_tree_build:
# build local directories in-tree
req.source_dir = req.link.file_path
return
# We always delete unpacked sdists after pip runs.
req.ensure_has_source_dir(
self.build_dir,
@ -517,11 +542,14 @@ class RequirementPreparer:
self._ensure_link_req_src_dir(req, parallel_builds)
hashes = self._get_linked_req_hashes(req)
if link.url not in self._downloaded:
if link.is_existing_dir() and self.in_tree_build:
local_file = None
elif link.url not in self._downloaded:
try:
local_file = unpack_url(
link, req.source_dir, self._download,
self.download_dir, hashes,
self.download_dir, hashes
)
except NetworkConnectionError as exc:
raise InstallationError(

View file

@ -140,7 +140,7 @@ def deduce_helpful_msg(req):
msg = " The path does exist. "
# Try to parse and check if it is a requirements file.
try:
with open(req, 'r') as fp:
with open(req) as fp:
# parse first line only
next(parse_requirements(fp.read()))
msg += (

View file

@ -17,7 +17,6 @@ from typing import (
List,
NoReturn,
Optional,
Text,
Tuple,
)
@ -34,9 +33,9 @@ if TYPE_CHECKING:
__all__ = ['parse_requirements']
ReqFileLines = Iterator[Tuple[int, Text]]
ReqFileLines = Iterator[Tuple[int, str]]
LineParser = Callable[[Text], Tuple[str, Values]]
LineParser = Callable[[str], Tuple[str, Values]]
SCHEME_RE = re.compile(r'^(http|https|file):', re.I)
COMMENT_RE = re.compile(r'(^|\s+)#.*$')

View file

@ -529,7 +529,7 @@ class UninstallPathSet:
elif develop_egg_link:
# develop egg
with open(develop_egg_link, 'r') as fh:
with open(develop_egg_link) as fh:
link_pointer = os.path.normcase(fh.readline().strip())
assert (link_pointer == dist.location), (
'Egg-link {} does not match installed location of {} '

View file

@ -12,13 +12,12 @@ for sub-dependencies
# The following comment should be removed at some point in the future.
# mypy: strict-optional=False
# mypy: disallow-untyped-defs=False
import logging
import sys
from collections import defaultdict
from itertools import chain
from typing import DefaultDict, List, Optional, Set, Tuple
from typing import DefaultDict, Iterable, List, Optional, Set, Tuple
from pip._vendor.packaging import specifiers
from pip._vendor.pkg_resources import Distribution
@ -388,6 +387,7 @@ class Resolver(BaseResolver):
more_reqs = [] # type: List[InstallRequirement]
def add_req(subreq, extras_requested):
# type: (Distribution, Iterable[str]) -> None
sub_install_req = self._make_install_req(
str(subreq),
req_to_install,
@ -447,6 +447,7 @@ class Resolver(BaseResolver):
ordered_reqs = set() # type: Set[InstallRequirement]
def schedule(req):
# type: (InstallRequirement) -> None
if req.satisfied_by or req in ordered_reqs:
return
if req.constraint:

View file

@ -404,8 +404,24 @@ class Factory:
)
return UnsupportedPythonVersion(message)
def get_installation_error(self, e):
# type: (ResolutionImpossible) -> InstallationError
def _report_single_requirement_conflict(self, req, parent):
# type: (Requirement, Candidate) -> DistributionNotFound
if parent is None:
req_disp = str(req)
else:
req_disp = f"{req} (from {parent.name})"
logger.critical(
"Could not find a version that satisfies the requirement %s",
req_disp,
)
return DistributionNotFound(f"No matching distribution found for {req}")
def get_installation_error(
self,
e, # type: ResolutionImpossible
constraints, # type: Dict[str, Constraint]
):
# type: (...) -> InstallationError
assert e.causes, "Installation error reported with no cause"
@ -425,15 +441,8 @@ class Factory:
# satisfied. We just report that case.
if len(e.causes) == 1:
req, parent = e.causes[0]
if parent is None:
req_disp = str(req)
else:
req_disp = f"{req} (from {parent.name})"
logger.critical(
"Could not find a version that satisfies the requirement %s",
req_disp,
)
return DistributionNotFound(f"No matching distribution found for {req}")
if req.name not in constraints:
return self._report_single_requirement_conflict(req, parent)
# OK, we now have a list of requirements that can't all be
# satisfied at once.
@ -475,13 +484,20 @@ class Factory:
)
logger.critical(msg)
msg = "\nThe conflict is caused by:"
relevant_constraints = set()
for req, parent in e.causes:
if req.name in constraints:
relevant_constraints.add(req.name)
msg = msg + "\n "
if parent:
msg = msg + "{} {} depends on ".format(parent.name, parent.version)
else:
msg = msg + "The user requested "
msg = msg + req.format_for_error()
for key in relevant_constraints:
spec = constraints[key].specifier
msg += f"\n The user requested (constraint) {key}{spec}"
msg = (
msg

View file

@ -3,7 +3,6 @@ import logging
import os
from typing import TYPE_CHECKING, Dict, List, Optional, Set, Tuple
from pip._vendor import six
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.resolvelib import ResolutionImpossible
from pip._vendor.resolvelib import Resolver as RLResolver
@ -122,8 +121,8 @@ class Resolver(BaseResolver):
)
except ResolutionImpossible as e:
error = self.factory.get_installation_error(e)
six.raise_from(error, e)
error = self.factory.get_installation_error(e, constraints)
raise error from e
req_set = RequirementSet(check_supported_wheels=check_supported_wheels)
for candidate in self._result.mapping.values():

View file

@ -1,9 +1,6 @@
"""Stuff that differs in different Python versions and platform
distributions."""
# The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False
import logging
import os
import sys

View file

@ -2,12 +2,9 @@
A module that implements tooling to enable easy warnings about deprecations.
"""
# The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False
import logging
import warnings
from typing import Any, Optional
from typing import Any, Optional, TextIO, Type, Union
from pip._vendor.packaging.version import parse
@ -24,7 +21,15 @@ _original_showwarning = None # type: Any
# Warnings <-> Logging Integration
def _showwarning(message, category, filename, lineno, file=None, line=None):
def _showwarning(
message, # type: Union[Warning, str]
category, # type: Type[Warning]
filename, # type: str
lineno, # type: int
file=None, # type: Optional[TextIO]
line=None, # type: Optional[str]
):
# type: (...) -> None
if file is not None:
if _original_showwarning is not None:
_original_showwarning(message, category, filename, lineno, file, line)

View file

@ -1,6 +1,3 @@
# The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False
import contextlib
import errno
import logging
@ -8,7 +5,7 @@ import logging.handlers
import os
import sys
from logging import Filter, getLogger
from typing import Any
from typing import IO, Any, Callable, Iterator, Optional, TextIO, Type, cast
from pip._internal.utils.compat import WINDOWS
from pip._internal.utils.deprecation import DEPRECATION_MSG_PREFIX
@ -46,15 +43,17 @@ if WINDOWS:
# https://bugs.python.org/issue19612
# https://bugs.python.org/issue30418
def _is_broken_pipe_error(exc_class, exc):
# type: (Type[BaseException], BaseException) -> bool
"""See the docstring for non-Windows below."""
return (exc_class is BrokenPipeError) or (
exc_class is OSError and exc.errno in (errno.EINVAL, errno.EPIPE)
isinstance(exc, OSError) and exc.errno in (errno.EINVAL, errno.EPIPE)
)
else:
# Then we are in the non-Windows case.
def _is_broken_pipe_error(exc_class, exc):
# type: (Type[BaseException], BaseException) -> bool
"""
Return whether an exception is a broken pipe error.
@ -67,6 +66,7 @@ else:
@contextlib.contextmanager
def indent_log(num=2):
# type: (int) -> Iterator[None]
"""
A context manager which will cause the log output to be indented for any
log messages emitted inside it.
@ -81,6 +81,7 @@ def indent_log(num=2):
def get_indentation():
# type: () -> int
return getattr(_log_state, "indentation", 0)
@ -104,6 +105,7 @@ class IndentingFormatter(logging.Formatter):
super().__init__(*args, **kwargs)
def get_message_start(self, formatted, levelno):
# type: (str, int) -> str
"""
Return the start of the formatted log message (not counting the
prefix to add to each line).
@ -120,6 +122,7 @@ class IndentingFormatter(logging.Formatter):
return "ERROR: "
def format(self, record):
# type: (logging.LogRecord) -> str
"""
Calls the standard formatter, but will indent all of the log message
lines by our current indentation level.
@ -137,7 +140,9 @@ class IndentingFormatter(logging.Formatter):
def _color_wrap(*colors):
# type: (*str) -> Callable[[str], str]
def wrapped(inp):
# type: (str) -> str
return "".join(list(colors) + [inp, colorama.Style.RESET_ALL])
return wrapped
@ -156,6 +161,7 @@ class ColorizedStreamHandler(logging.StreamHandler):
COLORS = []
def __init__(self, stream=None, no_color=None):
# type: (Optional[TextIO], bool) -> None
super().__init__(stream)
self._no_color = no_color
@ -163,16 +169,19 @@ class ColorizedStreamHandler(logging.StreamHandler):
self.stream = colorama.AnsiToWin32(self.stream)
def _using_stdout(self):
# type: () -> bool
"""
Return whether the handler is using sys.stdout.
"""
if WINDOWS and colorama:
# Then self.stream is an AnsiToWin32 object.
return self.stream.wrapped is sys.stdout
stream = cast(colorama.AnsiToWin32, self.stream)
return stream.wrapped is sys.stdout
return self.stream is sys.stdout
def should_color(self):
# type: () -> bool
# Don't colorize things if we do not have colorama or if told not to
if not colorama or self._no_color:
return False
@ -195,7 +204,8 @@ class ColorizedStreamHandler(logging.StreamHandler):
return False
def format(self, record):
msg = logging.StreamHandler.format(self, record)
# type: (logging.LogRecord) -> str
msg = super().format(record)
if self.should_color():
for level, color in self.COLORS:
@ -207,12 +217,18 @@ class ColorizedStreamHandler(logging.StreamHandler):
# The logging module says handleError() can be customized.
def handleError(self, record):
# type: (logging.LogRecord) -> None
exc_class, exc = sys.exc_info()[:2]
# If a broken pipe occurred while calling write() or flush() on the
# stdout stream in logging's Handler.emit(), then raise our special
# exception so we can handle it in main() instead of logging the
# broken pipe error and continuing.
if exc_class and self._using_stdout() and _is_broken_pipe_error(exc_class, exc):
if (
exc_class
and exc
and self._using_stdout()
and _is_broken_pipe_error(exc_class, exc)
):
raise BrokenStdoutLoggingError()
return super().handleError(record)
@ -220,15 +236,18 @@ class ColorizedStreamHandler(logging.StreamHandler):
class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler):
def _open(self):
# type: () -> IO[Any]
ensure_dir(os.path.dirname(self.baseFilename))
return logging.handlers.RotatingFileHandler._open(self)
return super()._open()
class MaxLevelFilter(Filter):
def __init__(self, level):
# type: (int) -> None
self.level = level
def filter(self, record):
# type: (logging.LogRecord) -> bool
return record.levelno < self.level
@ -239,12 +258,14 @@ class ExcludeLoggerFilter(Filter):
"""
def filter(self, record):
# type: (logging.LogRecord) -> bool
# The base Filter class allows only records from a logger (or its
# children).
return not super().filter(record)
def setup_logging(verbosity, no_color, user_log_file):
# type: (int, bool, Optional[str]) -> int
"""Configures and sets up all of the logging
Returns the requested logging level, as its integer value.

View file

@ -1,6 +1,5 @@
# The following comment should be removed at some point in the future.
# mypy: strict-optional=False
# mypy: disallow-untyped-defs=False
import contextlib
import errno
@ -16,16 +15,21 @@ import sys
import urllib.parse
from io import StringIO
from itertools import filterfalse, tee, zip_longest
from types import TracebackType
from typing import (
Any,
AnyStr,
BinaryIO,
Callable,
Container,
ContextManager,
Iterable,
Iterator,
List,
Optional,
TextIO,
Tuple,
Type,
TypeVar,
cast,
)
@ -64,8 +68,10 @@ __all__ = [
logger = logging.getLogger(__name__)
VersionInfo = Tuple[int, int, int]
T = TypeVar("T")
ExcInfo = Tuple[Type[BaseException], BaseException, TracebackType]
VersionInfo = Tuple[int, int, int]
NetlocTuple = Tuple[str, Tuple[Optional[str], Optional[str]]]
def get_pip_version():
@ -132,6 +138,7 @@ def rmtree(dir, ignore_errors=False):
def rmtree_errorhandler(func, path, exc_info):
# type: (Callable[..., Any], str, ExcInfo) -> None
"""On Windows, the files in .svn are read-only, so when rmtree() tries to
remove them, an exception is thrown. We catch that here, remove the
read-only attribute, and hopefully continue without problems."""
@ -279,6 +286,7 @@ def is_installable_dir(path):
def read_chunks(file, size=io.DEFAULT_BUFFER_SIZE):
# type: (BinaryIO, int) -> Iterator[bytes]
"""Yield pieces of data from a file-like object until EOF."""
while True:
chunk = file.read(size)
@ -491,19 +499,24 @@ def write_output(msg, *args):
class StreamWrapper(StringIO):
orig_stream = None # type: TextIO
@classmethod
def from_stream(cls, orig_stream):
# type: (TextIO) -> StreamWrapper
cls.orig_stream = orig_stream
return cls()
# compileall.compile_dir() needs stdout.encoding to print to stdout
# https://github.com/python/mypy/issues/4125
@property
def encoding(self):
def encoding(self): # type: ignore
return self.orig_stream.encoding
@contextlib.contextmanager
def captured_output(stream_name):
# type: (str) -> Iterator[StreamWrapper]
"""Return a context manager used by captured_stdout/stdin/stderr
that temporarily replaces the sys stream *stream_name* with a StringIO.
@ -518,6 +531,7 @@ def captured_output(stream_name):
def captured_stdout():
# type: () -> ContextManager[StreamWrapper]
"""Capture the output of sys.stdout:
with captured_stdout() as stdout:
@ -530,6 +544,7 @@ def captured_stdout():
def captured_stderr():
# type: () -> ContextManager[StreamWrapper]
"""
See captured_stdout().
"""
@ -538,6 +553,7 @@ def captured_stderr():
# Simulates an enum
def enum(*sequential, **named):
# type: (*Any, **Any) -> Type[Any]
enums = dict(zip(sequential, range(len(sequential))), **named)
reverse = {value: key for key, value in enums.items()}
enums["reverse_mapping"] = reverse
@ -579,6 +595,7 @@ def parse_netloc(netloc):
def split_auth_from_netloc(netloc):
# type: (str) -> NetlocTuple
"""
Parse out and remove the auth information from a netloc.
@ -591,17 +608,20 @@ def split_auth_from_netloc(netloc):
# behaves if more than one @ is present (which can be checked using
# the password attribute of urlsplit()'s return value).
auth, netloc = netloc.rsplit("@", 1)
pw = None # type: Optional[str]
if ":" in auth:
# Split from the left because that's how urllib.parse.urlsplit()
# behaves if more than one : is present (which again can be checked
# using the password attribute of the return value)
user_pass = auth.split(":", 1)
user, pw = auth.split(":", 1)
else:
user_pass = auth, None
user, pw = auth, None
user_pass = tuple(None if x is None else urllib.parse.unquote(x) for x in user_pass)
user = urllib.parse.unquote(user)
if pw is not None:
pw = urllib.parse.unquote(pw)
return netloc, user_pass
return netloc, (user, pw)
def redact_netloc(netloc):
@ -628,6 +648,7 @@ def redact_netloc(netloc):
def _transform_url(url, transform_netloc):
# type: (str, Callable[[str], Tuple[Any, ...]]) -> Tuple[str, NetlocTuple]
"""Transform and replace netloc in a url.
transform_netloc is a function taking the netloc and returning a
@ -642,14 +663,16 @@ def _transform_url(url, transform_netloc):
# stripped url
url_pieces = (purl.scheme, netloc_tuple[0], purl.path, purl.query, purl.fragment)
surl = urllib.parse.urlunsplit(url_pieces)
return surl, netloc_tuple
return surl, cast("NetlocTuple", netloc_tuple)
def _get_netloc(netloc):
# type: (str) -> NetlocTuple
return split_auth_from_netloc(netloc)
def _redact_netloc(netloc):
# type: (str) -> Tuple[str,]
return (redact_netloc(netloc),)
@ -765,6 +788,7 @@ def hash_file(path, blocksize=1 << 20):
def is_wheel_installed():
# type: () -> bool
"""
Return whether the wheel package is installed.
"""

View file

@ -1,9 +1,8 @@
"""Utilities for defining models
"""
# The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False
import operator
from typing import Any, Callable, Type
class KeyBasedCompareMixin:
@ -12,28 +11,36 @@ class KeyBasedCompareMixin:
__slots__ = ["_compare_key", "_defining_class"]
def __init__(self, key, defining_class):
# type: (Any, Type[KeyBasedCompareMixin]) -> None
self._compare_key = key
self._defining_class = defining_class
def __hash__(self):
# type: () -> int
return hash(self._compare_key)
def __lt__(self, other):
# type: (Any) -> bool
return self._compare(other, operator.__lt__)
def __le__(self, other):
# type: (Any) -> bool
return self._compare(other, operator.__le__)
def __gt__(self, other):
# type: (Any) -> bool
return self._compare(other, operator.__gt__)
def __ge__(self, other):
# type: (Any) -> bool
return self._compare(other, operator.__ge__)
def __eq__(self, other):
# type: (Any) -> bool
return self._compare(other, operator.__eq__)
def _compare(self, other, method):
# type: (Any, Callable[[Any, Any], bool]) -> bool
if not isinstance(other, self._defining_class):
return NotImplemented

View file

@ -18,8 +18,6 @@ from typing import (
Union,
)
from pip._vendor import pkg_resources
from pip._internal.cli.spinners import SpinnerInterface
from pip._internal.exceptions import BadCommand, InstallationError
from pip._internal.utils.misc import (
@ -62,7 +60,7 @@ def make_vcs_requirement_url(repo_url, rev, project_name, subdir=None):
repo_url: the remote VCS url, with any needed VCS prefix (e.g. "git+").
project_name: the (unescaped) project name.
"""
egg_project_name = pkg_resources.to_filename(project_name)
egg_project_name = project_name.replace("-", "_")
req = f'{repo_url}@{rev}#egg={egg_project_name}'
if subdir:
req += f'&subdirectory={subdir}'

View file

@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-
from setuptools import setup
from setuptools.command import egg_info as orig_egg_info

View file

@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-
from distutils.core import setup
setup(name="SetupPyUTF8",

View file

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
from setuptools import setup
setup(

View file

@ -23,9 +23,7 @@ from tests.lib import (
need_svn,
path_to_url,
pyversion,
pyversion_tuple,
requirements_file,
windows_workaround_7667,
)
from tests.lib.filesystem import make_socket_file
from tests.lib.local_repos import local_checkout
@ -193,16 +191,10 @@ def test_pip_second_command_line_interface_works(
"""
# Re-install pip so we get the launchers.
script.pip_install_local('-f', common_wheels, pip_src)
# On old versions of Python, urllib3/requests will raise a warning about
# the lack of an SSLContext.
kwargs = {'expect_stderr': deprecated_python}
if pyversion_tuple < (2, 7, 9):
kwargs['expect_stderr'] = True
args = [f'pip{pyversion}']
args.extend(['install', 'INITools==0.2'])
args.extend(['-f', data.packages])
result = script.run(*args, **kwargs)
result = script.run(*args)
dist_info_folder = (
script.site_packages /
'INITools-0.2.dist-info'
@ -581,7 +573,29 @@ def test_install_from_local_directory_with_symlinks_to_directories(
result.did_create(dist_info_folder)
@pytest.mark.skipif("sys.platform == 'win32' or sys.version_info < (3,)")
def test_install_from_local_directory_with_in_tree_build(
script, data, with_wheel
):
"""
Test installing from a local directory with --use-feature=in-tree-build.
"""
to_install = data.packages.joinpath("FSPkg")
args = ["install", "--use-feature=in-tree-build", to_install]
in_tree_build_dir = to_install / "build"
assert not in_tree_build_dir.exists()
result = script.pip(*args)
fspkg_folder = script.site_packages / 'fspkg'
dist_info_folder = (
script.site_packages /
'FSPkg-0.1.dev0.dist-info'
)
result.did_create(fspkg_folder)
result.did_create(dist_info_folder)
assert in_tree_build_dir.exists()
@pytest.mark.skipif("sys.platform == 'win32'")
def test_install_from_local_directory_with_socket_file(
script, data, tmpdir, with_wheel
):
@ -742,7 +756,6 @@ def test_install_using_install_option_and_editable(script, tmpdir):
@pytest.mark.xfail
@pytest.mark.network
@need_mercurial
@windows_workaround_7667
def test_install_global_option_using_editable(script, tmpdir):
"""
Test using global distutils options, but in an editable installation
@ -1291,7 +1304,7 @@ def test_install_log(script, data, tmpdir):
'install', data.src.joinpath('chattymodule')]
result = script.pip(*args)
assert 0 == result.stdout.count("HELLO FROM CHATTYMODULE")
with open(f, 'r') as fp:
with open(f) as fp:
# one from egg_info, one from install
assert 2 == fp.read().count("HELLO FROM CHATTYMODULE")
@ -1314,7 +1327,8 @@ def test_cleanup_after_failed_wheel(script, with_wheel):
# One of the effects of not cleaning up is broken scripts:
script_py = script.bin_path / "script.py"
assert script_py.exists(), script_py
shebang = open(script_py, 'r').readline().strip()
with open(script_py) as f:
shebang = f.readline().strip()
assert shebang != '#!python', shebang
# OK, assert that we *said* we were cleaning up:
# /!\ if in need to change this, also change test_pep517_no_legacy_cleanup
@ -1385,7 +1399,6 @@ def test_install_no_binary_disables_building_wheels(script, data, with_wheel):
@pytest.mark.network
@windows_workaround_7667
def test_install_no_binary_builds_pep_517_wheel(script, data, with_wheel):
to_install = data.packages.joinpath('pep517_setup_and_pyproject')
res = script.pip(
@ -1400,7 +1413,6 @@ def test_install_no_binary_builds_pep_517_wheel(script, data, with_wheel):
@pytest.mark.network
@windows_workaround_7667
def test_install_no_binary_uses_local_backend(
script, data, with_wheel, tmpdir):
to_install = data.packages.joinpath('pep517_wrapper_buildsys')

View file

@ -357,7 +357,7 @@ def test_constraints_local_editable_install_causes_error(
assert 'Could not satisfy constraints' in result.stderr, str(result)
else:
# Because singlemodule only has 0.0.1 available.
assert 'No matching distribution found' in result.stderr, str(result)
assert 'Cannot install singlemodule 0.0.1' in result.stderr, str(result)
@pytest.mark.network
@ -386,7 +386,7 @@ def test_constraints_local_install_causes_error(
assert 'Could not satisfy constraints' in result.stderr, str(result)
else:
# Because singlemodule only has 0.0.1 available.
assert 'No matching distribution found' in result.stderr, str(result)
assert 'Cannot install singlemodule 0.0.1' in result.stderr, str(result)
def test_constraints_constrain_to_local_editable(

View file

@ -687,7 +687,7 @@ def test_new_resolver_constraint_on_dependency(script):
@pytest.mark.parametrize(
"constraint_version, expect_error, message",
[
("1.0", True, "ERROR: No matching distribution found for foo 2.0"),
("1.0", True, "Cannot install foo 2.0"),
("2.0", False, "Successfully installed foo-2.0"),
],
)

View file

@ -24,3 +24,24 @@ def test_new_resolver_conflict_requirements_file(tmpdir, script):
message = "package versions have conflicting dependencies"
assert message in result.stderr, str(result)
def test_new_resolver_conflict_constraints_file(tmpdir, script):
create_basic_wheel_for_package(script, "pkg", "1.0")
constrats_file = tmpdir.joinpath("constraints.txt")
constrats_file.write_text("pkg!=1.0")
result = script.pip(
"install",
"--no-cache-dir", "--no-index",
"--find-links", script.scratch_path,
"-c", constrats_file,
"pkg==1.0",
expect_error=True,
)
assert "ResolutionImpossible" in result.stderr, str(result)
message = "The user requested (constraint) pkg!=1.0"
assert message in result.stdout, str(result)

View file

@ -33,7 +33,7 @@ def test_no_color(script):
pytest.skip("Unable to capture output using script: " + cmd)
try:
with open("/tmp/pip-test-no-color.txt", "r") as output_file:
with open("/tmp/pip-test-no-color.txt") as output_file:
retval = output_file.read()
return retval
finally:

View file

@ -3,7 +3,7 @@ from pip._vendor import toml
from pip._internal.build_env import BuildEnvironment
from pip._internal.req import InstallRequirement
from tests.lib import make_test_finder, path_to_url, windows_workaround_7667
from tests.lib import make_test_finder, path_to_url
def make_project(tmpdir, requires=None, backend=None, backend_path=None):
@ -255,7 +255,6 @@ def test_explicit_setuptools_backend(script, tmpdir, data, common_wheels):
@pytest.mark.network
@windows_workaround_7667
def test_pep517_and_build_options(script, tmpdir, data, common_wheels):
"""Backend generated requirements are installed in the build env"""
project_dir, name = make_pyproject_with_setup(tmpdir)

View file

@ -64,7 +64,7 @@ def test_export_rev(script, tmpdir):
url = hide_url('bzr+' + _test_path_to_file_url(source_dir) + '@1')
Bazaar().export(str(export_dir), url=url)
with open(export_dir / 'test_file', 'r') as f:
with open(export_dir / 'test_file') as f:
assert f.read() == 'something initial'

View file

@ -31,7 +31,6 @@ DATA_DIR = Path(__file__).parent.parent.joinpath("data").resolve()
SRC_DIR = Path(__file__).resolve().parent.parent.parent
pyversion = get_major_minor_version()
pyversion_tuple = sys.version_info
CURRENT_PY_VERSION_INFO = sys.version_info[:3]
@ -1145,10 +1144,3 @@ def need_mercurial(fn):
return pytest.mark.mercurial(need_executable(
'Mercurial', ('hg', 'version')
)(fn))
# Workaround for test failures after new wheel release.
windows_workaround_7667 = pytest.mark.skipif(
"sys.platform == 'win32' and sys.version_info < (3,)",
reason="Workaround for #7667",
)

View file

@ -22,8 +22,8 @@ class Path(str):
def __new__(cls, *paths):
if len(paths):
return str.__new__(cls, os.path.join(*paths))
return str.__new__(cls)
return super().__new__(cls, os.path.join(*paths))
return super().__new__(cls)
def __div__(self, path):
"""
@ -73,9 +73,6 @@ class Path(str):
def __repr__(self):
return "Path({inner})".format(inner=str.__repr__(self))
def __hash__(self):
return str.__hash__(self)
@property
def name(self):
"""

View file

@ -102,7 +102,7 @@ def test_copy_source_tree(clean_project, tmpdir):
assert expected_files == copied_files
@pytest.mark.skipif("sys.platform == 'win32' or sys.version_info < (3,)")
@pytest.mark.skipif("sys.platform == 'win32'")
def test_copy_source_tree_with_socket(clean_project, tmpdir, caplog):
target = tmpdir.joinpath("target")
expected_files = get_filelist(clean_project)
@ -121,7 +121,7 @@ def test_copy_source_tree_with_socket(clean_project, tmpdir, caplog):
assert socket_path in record.message
@pytest.mark.skipif("sys.platform == 'win32' or sys.version_info < (3,)")
@pytest.mark.skipif("sys.platform == 'win32'")
def test_copy_source_tree_with_socket_fails_with_no_socket_error(
clean_project, tmpdir
):

View file

@ -89,6 +89,7 @@ class TestRequirementSet:
require_hashes=require_hashes,
use_user_site=False,
lazy_wheel=False,
in_tree_build=False,
)
yield Resolver(
preparer=preparer,

View file

@ -57,11 +57,6 @@ def test_make_subprocess_output_error__non_ascii_command_arg(monkeypatch):
Test a command argument with a non-ascii character.
"""
cmd_args = ['foo', 'déf']
if sys.version_info[0] == 2:
# Check in Python 2 that the str (bytes object) with the non-ascii
# character has the encoding we expect. (This comes from the source
# code encoding at the top of the file.)
assert cmd_args[1].decode('utf-8') == 'déf'
# We need to monkeypatch so the encoding will be correct on Windows.
monkeypatch.setattr(locale, 'getpreferredencoding', lambda: 'utf-8')
@ -80,7 +75,6 @@ def test_make_subprocess_output_error__non_ascii_command_arg(monkeypatch):
assert actual == expected, f'actual: {actual}'
@pytest.mark.skipif("sys.version_info < (3,)")
def test_make_subprocess_output_error__non_ascii_cwd_python_3(monkeypatch):
"""
Test a str (text) cwd with a non-ascii character in Python 3.
@ -102,36 +96,6 @@ def test_make_subprocess_output_error__non_ascii_cwd_python_3(monkeypatch):
assert actual == expected, f'actual: {actual}'
@pytest.mark.parametrize('encoding', [
'utf-8',
# Test a Windows encoding.
'cp1252',
])
@pytest.mark.skipif("sys.version_info >= (3,)")
def test_make_subprocess_output_error__non_ascii_cwd_python_2(
monkeypatch, encoding,
):
"""
Test a str (bytes object) cwd with a non-ascii character in Python 2.
"""
cmd_args = ['test']
cwd = '/path/to/cwd/déf'.encode(encoding)
monkeypatch.setattr(sys, 'getfilesystemencoding', lambda: encoding)
actual = make_subprocess_output_error(
cmd_args=cmd_args,
cwd=cwd,
lines=[],
exit_status=1,
)
expected = dedent("""\
Command errored out with exit status 1:
command: test
cwd: /path/to/cwd/déf
Complete output (0 lines):
----------------------------------------""")
assert actual == expected, f'actual: {actual}'
# This test is mainly important for checking unicode in Python 2.
def test_make_subprocess_output_error__non_ascii_line():
"""

View file

@ -91,7 +91,7 @@ def generate_news(session: Session, version: str) -> None:
def update_version_file(version: str, filepath: str) -> None:
with open(filepath, "r", encoding="utf-8") as f:
with open(filepath, encoding="utf-8") as f:
content = list(f)
file_modified = False

View file

@ -1,7 +1,6 @@
--use-feature=2020-resolver
cryptography==2.8
freezegun
mock
pretend
pytest
pytest-cov

View file

@ -1,17 +1,16 @@
# The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False
import os
import shutil
import subprocess
import sys
from glob import glob
from typing import List
VIRTUAL_ENV = os.environ['VIRTUAL_ENV']
TOX_PIP_DIR = os.path.join(VIRTUAL_ENV, 'pip')
def pip(args):
# type: (List[str]) -> None
# First things first, get a recent (stable) version of pip.
if not os.path.exists(TOX_PIP_DIR):
subprocess.check_call([sys.executable, '-m', 'pip',
@ -20,8 +19,8 @@ def pip(args):
'pip'])
shutil.rmtree(glob(os.path.join(TOX_PIP_DIR, 'pip-*.dist-info'))[0])
# And use that version.
pypath = os.environ.get('PYTHONPATH')
pypath = pypath.split(os.pathsep) if pypath is not None else []
pypath_env = os.environ.get('PYTHONPATH')
pypath = pypath_env.split(os.pathsep) if pypath_env is not None else []
pypath.insert(0, TOX_PIP_DIR)
os.environ['PYTHONPATH'] = os.pathsep.join(pypath)
subprocess.check_call([sys.executable, '-m', 'pip'] + args)