mirror of https://github.com/pypa/pip
Merge with master
This commit is contained in:
commit
ea391b2aab
|
@ -13,6 +13,7 @@ Reference Guide
|
|||
pip_list
|
||||
pip_show
|
||||
pip_search
|
||||
pip_cache
|
||||
pip_check
|
||||
pip_config
|
||||
pip_wheel
|
||||
|
|
|
@ -0,0 +1,22 @@
|
|||
|
||||
.. _`pip cache`:
|
||||
|
||||
pip cache
|
||||
---------
|
||||
|
||||
.. contents::
|
||||
|
||||
Usage
|
||||
*****
|
||||
|
||||
.. pip-command-usage:: cache
|
||||
|
||||
Description
|
||||
***********
|
||||
|
||||
.. pip-command-description:: cache
|
||||
|
||||
Options
|
||||
*******
|
||||
|
||||
.. pip-command-options:: cache
|
|
@ -728,8 +728,13 @@ You can install local projects by specifying the project path to pip::
|
|||
|
||||
$ pip install path/to/SomeProject
|
||||
|
||||
During regular installation, pip will copy the entire project directory to a temporary location and install from there.
|
||||
The exception is that pip will exclude .tox and .nox directories present in the top level of the project from being copied.
|
||||
Until version 20.0, pip copied the entire project directory to a temporary
|
||||
location and installed from there. This approach was the cause of several
|
||||
performance and correctness issues. As of version 20.1 pip installs from the
|
||||
local project directory. Depending on the build backend used by the project,
|
||||
this may generate secondary build artifacts in the project directory, such as
|
||||
the ``.egg-info`` and ``build`` directories in the case of the setuptools
|
||||
backend.
|
||||
|
||||
|
||||
.. _`editable-installs`:
|
||||
|
|
|
@ -0,0 +1,20 @@
|
|||
:orphan:
|
||||
|
||||
=========
|
||||
pip-cache
|
||||
=========
|
||||
|
||||
Description
|
||||
***********
|
||||
|
||||
.. pip-command-description:: cache
|
||||
|
||||
Usage
|
||||
*****
|
||||
|
||||
.. pip-command-usage:: cache
|
||||
|
||||
Options
|
||||
*******
|
||||
|
||||
.. pip-command-options:: cache
|
|
@ -0,0 +1 @@
|
|||
Add ``pip cache`` command for inspecting/managing pip's wheel cache.
|
|
@ -0,0 +1,9 @@
|
|||
Building of local directories is now done in place. Previously pip copied the
|
||||
local directory tree to a temporary location before building. That approach had
|
||||
a number of drawbacks, among which were performance issues, as well as various
|
||||
issues arising when the python project directory depends on its parent
|
||||
directory (such as the presence of a VCS directory). The user visible effect of
|
||||
this change is that secondary build artifacts, if any, may therefore be created
|
||||
in the local directory, whereas before they were created in a temporary copy of
|
||||
the directory and then deleted. This notably includes the ``build`` and
|
||||
``.egg-info`` directories in the case of the setuptools build backend.
|
|
@ -0,0 +1 @@
|
|||
Print vendored libraries version in pip debug.
|
|
@ -0,0 +1 @@
|
|||
Change default behaviour to always cache responses from trusted-host source.
|
|
@ -0,0 +1 @@
|
|||
Significantly speedup ``pip list --outdated`` through parallelizing index interaction.
|
1
setup.py
1
setup.py
|
@ -67,6 +67,7 @@ setup(
|
|||
exclude=["contrib", "docs", "tests*", "tasks"],
|
||||
),
|
||||
package_data={
|
||||
"pip._vendor": ["vendor.txt"],
|
||||
"pip._vendor.certifi": ["*.pem"],
|
||||
"pip._vendor.requests": ["*.pem"],
|
||||
"pip._vendor.distlib._backport": ["sysconfig.cfg"],
|
||||
|
|
|
@ -68,6 +68,10 @@ commands_dict = OrderedDict([
|
|||
'pip._internal.commands.search', 'SearchCommand',
|
||||
'Search PyPI for packages.',
|
||||
)),
|
||||
('cache', CommandInfo(
|
||||
'pip._internal.commands.cache', 'CacheCommand',
|
||||
"Inspect and manage pip's wheel cache.",
|
||||
)),
|
||||
('wheel', CommandInfo(
|
||||
'pip._internal.commands.wheel', 'WheelCommand',
|
||||
'Build wheels from your requirements.',
|
||||
|
|
|
@ -0,0 +1,165 @@
|
|||
from __future__ import absolute_import
|
||||
|
||||
import logging
|
||||
import os
|
||||
import textwrap
|
||||
|
||||
import pip._internal.utils.filesystem as filesystem
|
||||
from pip._internal.cli.base_command import Command
|
||||
from pip._internal.cli.status_codes import ERROR, SUCCESS
|
||||
from pip._internal.exceptions import CommandError, PipError
|
||||
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
|
||||
|
||||
if MYPY_CHECK_RUNNING:
|
||||
from optparse import Values
|
||||
from typing import Any, List
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class CacheCommand(Command):
    """
    Inspect and manage pip's wheel cache.

    Subcommands:

    info: Show information about the cache.
    list: List filenames of packages stored in the cache.
    remove: Remove one or more package from the cache.
    purge: Remove all items from the cache.

    <pattern> can be a glob expression or a package name.
    """

    usage = """
        %prog info
        %prog list [<pattern>]
        %prog remove <pattern>
        %prog purge
    """

    def run(self, options, args):
        # type: (Values, List[Any]) -> int
        # Map each sub-command name to the method that implements it.
        dispatch = {
            "info": self.get_cache_info,
            "list": self.list_cache_items,
            "remove": self.remove_cache_items,
            "purge": self.purge_cache,
        }

        # Reject a missing or unknown action up front.
        if not args or args[0] not in dispatch:
            logger.error("Need an action ({}) to perform.".format(
                ", ".join(sorted(dispatch)))
            )
            return ERROR

        handler = dispatch[args[0]]

        # All error handling lives here; the handlers just raise PipError.
        try:
            handler(options, args[1:])
        except PipError as exc:
            logger.error(exc.args[0])
            return ERROR

        return SUCCESS

    def get_cache_info(self, options, args):
        # type: (Values, List[Any]) -> None
        """Log the cache location, its size and the number of cached wheels."""
        if args:
            raise CommandError('Too many arguments')

        location = self._wheels_cache_dir(options)
        wheel_count = len(self._find_wheels(options, '*'))
        size = filesystem.format_directory_size(location)

        summary = textwrap.dedent("""
            Location: {location}
            Size: {size}
            Number of wheels: {package_count}
        """).format(
            location=location,
            package_count=wheel_count,
            size=size,
        ).strip()

        logger.info(summary)

    def list_cache_items(self, options, args):
        # type: (Values, List[Any]) -> None
        """Log every cached wheel, optionally filtered by a glob pattern."""
        if len(args) > 1:
            raise CommandError('Too many arguments')

        pattern = args[0] if args else '*'

        files = self._find_wheels(options, pattern)
        if not files:
            logger.info('Nothing cached.')
            return

        entries = [
            ' - {} ({})'.format(
                os.path.basename(path),
                filesystem.format_file_size(path),
            )
            for path in files
        ]
        logger.info('Cache contents:\n')
        logger.info('\n'.join(sorted(entries)))

    def remove_cache_items(self, options, args):
        # type: (Values, List[Any]) -> None
        """Delete every cached wheel matching the given pattern."""
        if len(args) > 1:
            raise CommandError('Too many arguments')
        if not args:
            raise CommandError('Please provide a pattern')

        matches = self._find_wheels(options, args[0])
        if not matches:
            raise CommandError('No matching packages')

        for path in matches:
            os.unlink(path)
            logger.debug('Removed %s', path)
        logger.info('Files removed: %s', len(matches))

    def purge_cache(self, options, args):
        # type: (Values, List[Any]) -> None
        """Empty the whole wheel cache (equivalent to removing '*')."""
        if args:
            raise CommandError('Too many arguments')

        return self.remove_cache_items(options, ['*'])

    def _wheels_cache_dir(self, options):
        # type: (Values) -> str
        # pip stores downloaded/built wheels under <cache_dir>/wheels.
        return os.path.join(options.cache_dir, 'wheels')

    def _find_wheels(self, options, pattern):
        # type: (Values, str) -> List[str]
        """Return cached wheel files whose names match ``pattern``.

        Per PEP 427 a wheel filename is
        {distribution}-{version}(-{build})?-{python}-{abi}-{platform}.whl,
        and non-alphanumeric characters in the distribution are normalized
        to underscores, so no hyphen can occur before ``-{version}``. Thus:

        - a pattern containing "-" already includes at least the version;
          append ``*.whl`` to match the rest;
        - a pattern without "-" is only a name; append ``-*.whl`` to match
          the separator before the version plus everything after it.

        PEP 427: https://www.python.org/dev/peps/pep-0427/
        """
        wheel_dir = self._wheels_cache_dir(options)
        suffix = "*.whl" if "-" in pattern else "-*.whl"
        return filesystem.find_files(wheel_dir, pattern + suffix)
|
|
@ -8,8 +8,11 @@ import logging
|
|||
import os
|
||||
import sys
|
||||
|
||||
import pip._vendor
|
||||
from pip._vendor import pkg_resources
|
||||
from pip._vendor.certifi import where
|
||||
|
||||
from pip import __file__ as pip_location
|
||||
from pip._internal.cli import cmdoptions
|
||||
from pip._internal.cli.base_command import Command
|
||||
from pip._internal.cli.cmdoptions import make_target_python
|
||||
|
@ -19,7 +22,8 @@ from pip._internal.utils.misc import get_pip_version
|
|||
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
|
||||
|
||||
if MYPY_CHECK_RUNNING:
|
||||
from typing import Any, List, Optional
|
||||
from types import ModuleType
|
||||
from typing import Any, List, Optional, Dict
|
||||
from optparse import Values
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
@ -43,6 +47,93 @@ def show_sys_implementation():
|
|||
show_value('name', implementation_name)
|
||||
|
||||
|
||||
def create_vendor_txt_map():
    # type: () -> Dict[str, str]
    """Parse _vendor/vendor.txt into a {module name: pinned version} dict."""
    path = os.path.join(
        os.path.dirname(pip_location),
        '_vendor',
        'vendor.txt'
    )

    versions = {}
    with open(path) as fp:
        for raw_line in fp:
            # Only version-pinning lines ("name==1.2.3") are of interest;
            # anything after the first space (e.g. comments) is discarded.
            if '==' not in raw_line:
                continue
            requirement = raw_line.strip().split(' ', 1)[0]
            name, _, version = requirement.partition('==')
            versions[name] = version
    return versions
|
||||
|
||||
|
||||
def get_module_from_module_name(module_name):
    # type: (str) -> ModuleType
    """Import and return the vendored module for a vendor.txt entry."""
    # vendor.txt entries are not case-normalized, so lowercase first.
    name = module_name.lower()
    # PATCH: the setuptools project is vendored only as pkg_resources.
    if name == 'setuptools':
        name = 'pkg_resources'

    __import__(
        'pip._vendor.{}'.format(name),
        globals(),
        locals(),
        level=0
    )
    return getattr(pip._vendor, name)
|
||||
|
||||
|
||||
def get_vendor_version_from_module(module_name):
    # type: (str) -> str
    """Best-effort lookup of a vendored module's version string.

    Returns None when no version can be determined.
    """
    module = get_module_from_module_name(module_name)
    version = getattr(module, '__version__', None)
    if version:
        return version

    # No usable __version__: the library may be debundled, in which case
    # the dist metadata next to the module file carries the version.
    module_dir = os.path.dirname(getattr(module, '__file__'))
    working_set = pkg_resources.WorkingSet([module_dir])
    dist = working_set.find(pkg_resources.Requirement.parse(module_name))
    return getattr(dist, 'version', None)
|
||||
|
||||
|
||||
def show_actual_vendor_versions(vendor_txt_versions):
    # type: (Dict[str, str]) -> None
    """Log the actual version of each vendored library.

    An extra note is appended when the actual version cannot be determined
    or when it conflicts with the version pinned in vendor.txt.
    """
    for module_name, expected_version in vendor_txt_versions.items():
        actual_version = get_vendor_version_from_module(module_name)
        extra_message = ''
        if not actual_version:
            # Fall back to the pinned version when none can be located.
            actual_version = expected_version
            extra_message = (
                ' (Unable to locate actual module version, using'
                ' vendor.txt specified version)'
            )
        elif actual_version != expected_version:
            extra_message = (
                ' (CONFLICT: vendor.txt suggests version should'
                ' be {})'.format(expected_version)
            )

        logger.info('{name}=={actual}{extra}'.format(
            name=module_name,
            actual=actual_version,
            extra=extra_message,
        ))
|
||||
|
||||
|
||||
def show_vendor_versions():
    # type: () -> None
    """Log the versions of all libraries vendored into pip."""
    logger.info('vendored library versions:')

    versions = create_vendor_txt_map()
    # Indent the per-library lines under the header above.
    with indent_log():
        show_actual_vendor_versions(versions)
|
||||
|
||||
|
||||
def show_tags(options):
|
||||
# type: (Values) -> None
|
||||
tag_limit = 10
|
||||
|
@ -136,6 +227,9 @@ class DebugCommand(Command):
|
|||
show_value("REQUESTS_CA_BUNDLE", os.environ.get('REQUESTS_CA_BUNDLE'))
|
||||
show_value("CURL_CA_BUNDLE", os.environ.get('CURL_CA_BUNDLE'))
|
||||
show_value("pip._vendor.certifi.where()", where())
|
||||
show_value("pip._vendor.DEBUNDLED", pip._vendor.DEBUNDLED)
|
||||
|
||||
show_vendor_versions()
|
||||
|
||||
show_tags(options)
|
||||
|
||||
|
|
|
@ -5,8 +5,10 @@ from __future__ import absolute_import
|
|||
|
||||
import json
|
||||
import logging
|
||||
from multiprocessing.dummy import Pool
|
||||
|
||||
from pip._vendor import six
|
||||
from pip._vendor.requests.adapters import DEFAULT_POOLSIZE
|
||||
|
||||
from pip._internal.cli import cmdoptions
|
||||
from pip._internal.cli.req_command import IndexGroupCommand
|
||||
|
@ -183,7 +185,7 @@ class ListCommand(IndexGroupCommand):
|
|||
with self._build_session(options) as session:
|
||||
finder = self._build_package_finder(options, session)
|
||||
|
||||
for dist in packages:
|
||||
def latest_info(dist):
|
||||
typ = 'unknown'
|
||||
all_candidates = finder.find_all_candidates(dist.key)
|
||||
if not options.pre:
|
||||
|
@ -196,7 +198,7 @@ class ListCommand(IndexGroupCommand):
|
|||
)
|
||||
best_candidate = evaluator.sort_best_candidate(all_candidates)
|
||||
if best_candidate is None:
|
||||
continue
|
||||
return None
|
||||
|
||||
remote_version = best_candidate.version
|
||||
if best_candidate.link.is_wheel:
|
||||
|
@ -206,7 +208,19 @@ class ListCommand(IndexGroupCommand):
|
|||
# This is dirty but makes the rest of the code much cleaner
|
||||
dist.latest_version = remote_version
|
||||
dist.latest_filetype = typ
|
||||
yield dist
|
||||
return dist
|
||||
|
||||
# This is done for 2x speed up of requests to pypi.org
|
||||
# so that "real time" of this function
|
||||
# is almost equal to "user time"
|
||||
pool = Pool(DEFAULT_POOLSIZE)
|
||||
|
||||
for dist in pool.imap_unordered(latest_info, packages):
|
||||
if dist is not None:
|
||||
yield dist
|
||||
|
||||
pool.close()
|
||||
pool.join()
|
||||
|
||||
def output_package_listing(self, packages, options):
|
||||
packages = sorted(
|
||||
|
|
|
@ -217,6 +217,14 @@ class InsecureHTTPAdapter(HTTPAdapter):
|
|||
)
|
||||
|
||||
|
||||
class InsecureCacheControlAdapter(CacheControlAdapter):
|
||||
|
||||
def cert_verify(self, conn, url, verify, cert):
|
||||
super(InsecureCacheControlAdapter, self).cert_verify(
|
||||
conn=conn, url=url, verify=False, cert=cert
|
||||
)
|
||||
|
||||
|
||||
class PipSession(requests.Session):
|
||||
|
||||
timeout = None # type: Optional[int]
|
||||
|
@ -263,8 +271,16 @@ class PipSession(requests.Session):
|
|||
backoff_factor=0.25,
|
||||
)
|
||||
|
||||
# We want to _only_ cache responses on securely fetched origins. We do
|
||||
# this because we can't validate the response of an insecurely fetched
|
||||
# Our Insecure HTTPAdapter disables HTTPS validation. It does not
|
||||
# support caching so we'll use it for all http:// URLs.
|
||||
# If caching is disabled, we will also use it for
|
||||
# https:// hosts that we've marked as ignoring
|
||||
# TLS errors for (trusted-hosts).
|
||||
insecure_adapter = InsecureHTTPAdapter(max_retries=retries)
|
||||
|
||||
# We want to _only_ cache responses on securely fetched origins or when
|
||||
# the host is specified as trusted. We do this because
|
||||
# we can't validate the response of an insecurely/untrusted fetched
|
||||
# origin, and we don't want someone to be able to poison the cache and
|
||||
# require manual eviction from the cache to fix it.
|
||||
if cache:
|
||||
|
@ -272,16 +288,13 @@ class PipSession(requests.Session):
|
|||
cache=SafeFileCache(cache),
|
||||
max_retries=retries,
|
||||
)
|
||||
self._trusted_host_adapter = InsecureCacheControlAdapter(
|
||||
cache=SafeFileCache(cache),
|
||||
max_retries=retries,
|
||||
)
|
||||
else:
|
||||
secure_adapter = HTTPAdapter(max_retries=retries)
|
||||
|
||||
# Our Insecure HTTPAdapter disables HTTPS validation. It does not
|
||||
# support caching (see above) so we'll use it for all http:// URLs as
|
||||
# well as any https:// host that we've marked as ignoring TLS errors
|
||||
# for.
|
||||
insecure_adapter = InsecureHTTPAdapter(max_retries=retries)
|
||||
# Save this for later use in add_insecure_host().
|
||||
self._insecure_adapter = insecure_adapter
|
||||
self._trusted_host_adapter = insecure_adapter
|
||||
|
||||
self.mount("https://", secure_adapter)
|
||||
self.mount("http://", insecure_adapter)
|
||||
|
@ -310,12 +323,15 @@ class PipSession(requests.Session):
|
|||
if host_port not in self.pip_trusted_origins:
|
||||
self.pip_trusted_origins.append(host_port)
|
||||
|
||||
self.mount(build_url_from_netloc(host) + '/', self._insecure_adapter)
|
||||
self.mount(
|
||||
build_url_from_netloc(host) + '/',
|
||||
self._trusted_host_adapter
|
||||
)
|
||||
if not host_port[1]:
|
||||
# Mount wildcard ports for the same host.
|
||||
self.mount(
|
||||
build_url_from_netloc(host) + ':',
|
||||
self._insecure_adapter
|
||||
self._trusted_host_adapter
|
||||
)
|
||||
|
||||
def iter_secure_origins(self):
|
||||
|
|
|
@ -24,15 +24,9 @@ from pip._internal.exceptions import (
|
|||
PreviousBuildDirError,
|
||||
VcsHashUnsupported,
|
||||
)
|
||||
from pip._internal.utils.filesystem import copy2_fixed
|
||||
from pip._internal.utils.hashes import MissingHashes
|
||||
from pip._internal.utils.logging import indent_log
|
||||
from pip._internal.utils.misc import (
|
||||
display_path,
|
||||
hide_url,
|
||||
path_to_display,
|
||||
rmtree,
|
||||
)
|
||||
from pip._internal.utils.misc import display_path, hide_url
|
||||
from pip._internal.utils.temp_dir import TempDirectory
|
||||
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
|
||||
from pip._internal.utils.unpacking import unpack_file
|
||||
|
@ -133,59 +127,6 @@ def get_http_url(
|
|||
return File(from_path, content_type)
|
||||
|
||||
|
||||
def _copy2_ignoring_special_files(src, dest):
|
||||
# type: (str, str) -> None
|
||||
"""Copying special files is not supported, but as a convenience to users
|
||||
we skip errors copying them. This supports tools that may create e.g.
|
||||
socket files in the project source directory.
|
||||
"""
|
||||
try:
|
||||
copy2_fixed(src, dest)
|
||||
except shutil.SpecialFileError as e:
|
||||
# SpecialFileError may be raised due to either the source or
|
||||
# destination. If the destination was the cause then we would actually
|
||||
# care, but since the destination directory is deleted prior to
|
||||
# copy we ignore all of them assuming it is caused by the source.
|
||||
logger.warning(
|
||||
"Ignoring special file error '%s' encountered copying %s to %s.",
|
||||
str(e),
|
||||
path_to_display(src),
|
||||
path_to_display(dest),
|
||||
)
|
||||
|
||||
|
||||
def _copy_source_tree(source, target):
|
||||
# type: (str, str) -> None
|
||||
target_abspath = os.path.abspath(target)
|
||||
target_basename = os.path.basename(target_abspath)
|
||||
target_dirname = os.path.dirname(target_abspath)
|
||||
|
||||
def ignore(d, names):
|
||||
# type: (str, List[str]) -> List[str]
|
||||
skipped = [] # type: List[str]
|
||||
if d == source:
|
||||
# Pulling in those directories can potentially be very slow,
|
||||
# exclude the following directories if they appear in the top
|
||||
# level dir (and only it).
|
||||
# See discussion at https://github.com/pypa/pip/pull/6770
|
||||
skipped += ['.tox', '.nox']
|
||||
if os.path.abspath(d) == target_dirname:
|
||||
# Prevent an infinite recursion if the target is in source.
|
||||
# This can happen when TMPDIR is set to ${PWD}/...
|
||||
# and we copy PWD to TMPDIR.
|
||||
skipped += [target_basename]
|
||||
return skipped
|
||||
|
||||
kwargs = dict(ignore=ignore, symlinks=True) # type: CopytreeKwargs
|
||||
|
||||
if not PY2:
|
||||
# Python 2 does not support copy_function, so we only ignore
|
||||
# errors on special file copy in Python 3.
|
||||
kwargs['copy_function'] = _copy2_ignoring_special_files
|
||||
|
||||
shutil.copytree(source, target, **kwargs)
|
||||
|
||||
|
||||
def get_file_url(
|
||||
link, # type: Link
|
||||
download_dir=None, # type: Optional[str]
|
||||
|
@ -239,11 +180,9 @@ def unpack_url(
|
|||
unpack_vcs_link(link, location)
|
||||
return None
|
||||
|
||||
# If it's a url to a local directory
|
||||
# If it's a url to a local directory, we build in-place.
|
||||
# There is nothing to be done here.
|
||||
if link.is_existing_dir():
|
||||
if os.path.isdir(location):
|
||||
rmtree(location)
|
||||
_copy_source_tree(link.file_path, location)
|
||||
return None
|
||||
|
||||
# file urls
|
||||
|
@ -415,21 +354,25 @@ class RequirementPreparer(object):
|
|||
with indent_log():
|
||||
# Since source_dir is only set for editable requirements.
|
||||
assert req.source_dir is None
|
||||
req.ensure_has_source_dir(self.build_dir, autodelete_unpacked)
|
||||
# If a checkout exists, it's unwise to keep going. version
|
||||
# inconsistencies are logged later, but do not fail the
|
||||
# installation.
|
||||
# FIXME: this won't upgrade when there's an existing
|
||||
# package unpacked in `req.source_dir`
|
||||
if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
|
||||
raise PreviousBuildDirError(
|
||||
"pip can't proceed with requirements '{}' due to a"
|
||||
" pre-existing build directory ({}). This is "
|
||||
"likely due to a previous installation that failed"
|
||||
". pip is being responsible and not assuming it "
|
||||
"can delete this. Please delete it and try again."
|
||||
.format(req, req.source_dir)
|
||||
)
|
||||
if link.is_existing_dir():
|
||||
# Build local directories in place.
|
||||
req.source_dir = link.file_path
|
||||
else:
|
||||
req.ensure_has_source_dir(self.build_dir, autodelete_unpacked)
|
||||
# If a checkout exists, it's unwise to keep going. version
|
||||
# inconsistencies are logged later, but do not fail the
|
||||
# installation.
|
||||
# FIXME: this won't upgrade when there's an existing
|
||||
# package unpacked in `req.source_dir`
|
||||
if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
|
||||
raise PreviousBuildDirError(
|
||||
"pip can't proceed with requirements '{}' due to a"
|
||||
" pre-existing build directory ({}). This is "
|
||||
"likely due to a previous installation that failed"
|
||||
". pip is being responsible and not assuming it "
|
||||
"can delete this. Please delete it and try again."
|
||||
.format(req, req.source_dir)
|
||||
)
|
||||
|
||||
# Now that we have the real link, we can tell what kind of
|
||||
# requirements we have and raise some more informative errors
|
||||
|
|
|
@ -1,5 +1,9 @@
|
|||
from pip._vendor.packaging.utils import canonicalize_name
|
||||
|
||||
from pip._internal.exceptions import (
|
||||
InstallationError,
|
||||
UnsupportedPythonVersion,
|
||||
)
|
||||
from pip._internal.utils.misc import get_installed_distributions
|
||||
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
|
||||
|
||||
|
@ -12,7 +16,7 @@ from .candidates import (
|
|||
)
|
||||
from .requirements import (
|
||||
ExplicitRequirement,
|
||||
NoMatchRequirement,
|
||||
RequiresPythonRequirement,
|
||||
SpecifierRequirement,
|
||||
)
|
||||
|
||||
|
@ -22,6 +26,7 @@ if MYPY_CHECK_RUNNING:
|
|||
from pip._vendor.packaging.specifiers import SpecifierSet
|
||||
from pip._vendor.packaging.version import _BaseVersion
|
||||
from pip._vendor.pkg_resources import Distribution
|
||||
from pip._vendor.resolvelib import ResolutionImpossible
|
||||
|
||||
from pip._internal.index.package_finder import PackageFinder
|
||||
from pip._internal.models.candidate import InstallationCandidate
|
||||
|
@ -152,16 +157,36 @@ class Factory(object):
|
|||
# type: (Optional[SpecifierSet]) -> Optional[Requirement]
|
||||
if self._ignore_requires_python or specifier is None:
|
||||
return None
|
||||
# The logic here is different from SpecifierRequirement, for which we
|
||||
# "find" candidates matching the specifier. But for Requires-Python,
|
||||
# there is always exactly one candidate (the one specified with
|
||||
# py_version_info). Here we decide whether to return that based on
|
||||
# whether Requires-Python matches that one candidate or not.
|
||||
if self._python_candidate.version in specifier:
|
||||
return ExplicitRequirement(self._python_candidate)
|
||||
return NoMatchRequirement(self._python_candidate.name)
|
||||
return RequiresPythonRequirement(specifier, self._python_candidate)
|
||||
|
||||
def should_reinstall(self, candidate):
|
||||
# type: (Candidate) -> bool
|
||||
# TODO: Are there more cases this needs to return True? Editable?
|
||||
return candidate.name in self._installed_dists
|
||||
|
||||
def _report_requires_python_error(
|
||||
self,
|
||||
requirement, # type: RequiresPythonRequirement
|
||||
parent, # type: Candidate
|
||||
):
|
||||
# type: (...) -> UnsupportedPythonVersion
|
||||
template = (
|
||||
"Package {package!r} requires a different Python: "
|
||||
"{version} not in {specifier!r}"
|
||||
)
|
||||
message = template.format(
|
||||
package=parent.name,
|
||||
version=self._python_candidate.version,
|
||||
specifier=str(requirement.specifier),
|
||||
)
|
||||
return UnsupportedPythonVersion(message)
|
||||
|
||||
def get_installation_error(self, e):
|
||||
# type: (ResolutionImpossible) -> Optional[InstallationError]
|
||||
for cause in e.causes:
|
||||
if isinstance(cause.requirement, RequiresPythonRequirement):
|
||||
return self._report_requires_python_error(
|
||||
cause.requirement,
|
||||
cause.parent,
|
||||
)
|
||||
return None
|
||||
|
|
|
@ -7,6 +7,8 @@ from .base import Requirement, format_name
|
|||
if MYPY_CHECK_RUNNING:
|
||||
from typing import Sequence
|
||||
|
||||
from pip._vendor.packaging.specifiers import SpecifierSet
|
||||
|
||||
from pip._internal.req.req_install import InstallRequirement
|
||||
|
||||
from .base import Candidate
|
||||
|
@ -40,37 +42,6 @@ class ExplicitRequirement(Requirement):
|
|||
return candidate == self.candidate
|
||||
|
||||
|
||||
class NoMatchRequirement(Requirement):
|
||||
"""A requirement that never matches anything.
|
||||
|
||||
Note: Similar to ExplicitRequirement, the caller should handle name
|
||||
canonicalisation; this class does not perform it.
|
||||
"""
|
||||
def __init__(self, name):
|
||||
# type: (str) -> None
|
||||
self._name = name
|
||||
|
||||
def __repr__(self):
|
||||
# type: () -> str
|
||||
return "{class_name}(name={name!r})".format(
|
||||
class_name=self.__class__.__name__,
|
||||
name=self._name,
|
||||
)
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
# type: () -> str
|
||||
return self._name
|
||||
|
||||
def find_matches(self):
|
||||
# type: () -> Sequence[Candidate]
|
||||
return []
|
||||
|
||||
def is_satisfied_by(self, candidate):
|
||||
# type: (Candidate) -> bool
|
||||
return False
|
||||
|
||||
|
||||
class SpecifierRequirement(Requirement):
|
||||
def __init__(self, ireq, factory):
|
||||
# type: (InstallRequirement, Factory) -> None
|
||||
|
@ -118,3 +89,35 @@ class SpecifierRequirement(Requirement):
|
|||
"Internal issue: Candidate is not for this requirement " \
|
||||
" {} vs {}".format(candidate.name, self.name)
|
||||
return candidate.version in self._ireq.req.specifier
|
||||
|
||||
|
||||
class RequiresPythonRequirement(Requirement):
|
||||
"""A requirement representing Requires-Python metadata.
|
||||
"""
|
||||
def __init__(self, specifier, match):
|
||||
# type: (SpecifierSet, Candidate) -> None
|
||||
self.specifier = specifier
|
||||
self._candidate = match
|
||||
|
||||
def __repr__(self):
|
||||
# type: () -> str
|
||||
return "{class_name}({specifier!r})".format(
|
||||
class_name=self.__class__.__name__,
|
||||
specifier=str(self.specifier),
|
||||
)
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
# type: () -> str
|
||||
return self._candidate.name
|
||||
|
||||
def find_matches(self):
|
||||
# type: () -> Sequence[Candidate]
|
||||
if self._candidate.version in self.specifier:
|
||||
return [self._candidate]
|
||||
return []
|
||||
|
||||
def is_satisfied_by(self, candidate):
|
||||
# type: (Candidate) -> bool
|
||||
assert candidate.name == self._candidate.name, "Not Python candidate"
|
||||
return candidate.version in self.specifier
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
import functools
|
||||
import logging
|
||||
|
||||
from pip._vendor import six
|
||||
from pip._vendor.packaging.utils import canonicalize_name
|
||||
from pip._vendor.resolvelib import BaseReporter, ResolutionImpossible
|
||||
from pip._vendor.resolvelib import Resolver as RLResolver
|
||||
|
@ -58,6 +59,11 @@ class Resolver(BaseResolver):
|
|||
|
||||
def resolve(self, root_reqs, check_supported_wheels):
|
||||
# type: (List[InstallRequirement], bool) -> RequirementSet
|
||||
|
||||
# FIXME: Implement constraints.
|
||||
if any(r.constraint for r in root_reqs):
|
||||
raise InstallationError("Constraints are not yet supported.")
|
||||
|
||||
provider = PipProvider(
|
||||
factory=self.factory,
|
||||
ignore_dependencies=self.ignore_dependencies,
|
||||
|
@ -72,23 +78,29 @@ class Resolver(BaseResolver):
|
|||
|
||||
try:
|
||||
self._result = resolver.resolve(requirements)
|
||||
except ResolutionImpossible as exc:
|
||||
# TODO: This is just an initial version. May need more work.
|
||||
# Also could do with rewriting to fit better into 80-char
|
||||
# lines :-(
|
||||
for req, parent in exc.causes:
|
||||
logger.critical(
|
||||
"Could not find a version that satisfies " +
|
||||
"the requirement " +
|
||||
str(req) +
|
||||
("" if parent is None else " (from {})".format(
|
||||
parent.name
|
||||
))
|
||||
|
||||
except ResolutionImpossible as e:
|
||||
error = self.factory.get_installation_error(e)
|
||||
if not error:
|
||||
# TODO: This needs fixing, we need to look at the
|
||||
# factory.get_installation_error infrastructure, as that
|
||||
# doesn't really allow for the logger.critical calls I'm
|
||||
# using here.
|
||||
for req, parent in e.causes:
|
||||
logger.critical(
|
||||
"Could not find a version that satisfies " +
|
||||
"the requirement " +
|
||||
str(req) +
|
||||
("" if parent is None else " (from {})".format(
|
||||
parent.name
|
||||
))
|
||||
)
|
||||
raise InstallationError(
|
||||
"No matching distribution found for " +
|
||||
", ".join([r.name for r, _ in e.causes])
|
||||
)
|
||||
raise InstallationError(
|
||||
"No matching distribution found for " +
|
||||
", ".join([r.name for r, _ in exc.causes])
|
||||
)
|
||||
raise
|
||||
six.raise_from(error, e)
|
||||
|
||||
req_set = RequirementSet(check_supported_wheels=check_supported_wheels)
|
||||
for candidate in self._result.mapping.values():
|
||||
|
@ -133,7 +145,11 @@ class Resolver(BaseResolver):
|
|||
|
||||
# FIXME: This check will fail if there are unbreakable cycles.
|
||||
# Implement something to forcifully break them up to continue.
|
||||
assert progressed, "Order calculation stuck in dependency loop."
|
||||
if not progressed:
|
||||
raise InstallationError(
|
||||
"Could not determine installation order due to cicular "
|
||||
"dependency."
|
||||
)
|
||||
|
||||
sorted_items = sorted(
|
||||
req_set.requirements.items(),
|
||||
|
|
|
@ -1,9 +1,8 @@
|
|||
import errno
|
||||
import fnmatch
|
||||
import os
|
||||
import os.path
|
||||
import random
|
||||
import shutil
|
||||
import stat
|
||||
import sys
|
||||
from contextlib import contextmanager
|
||||
from tempfile import NamedTemporaryFile
|
||||
|
@ -14,10 +13,11 @@ from pip._vendor.retrying import retry # type: ignore
|
|||
from pip._vendor.six import PY2
|
||||
|
||||
from pip._internal.utils.compat import get_path_uid
|
||||
from pip._internal.utils.misc import format_size
|
||||
from pip._internal.utils.typing import MYPY_CHECK_RUNNING, cast
|
||||
|
||||
if MYPY_CHECK_RUNNING:
|
||||
from typing import Any, BinaryIO, Iterator
|
||||
from typing import Any, BinaryIO, Iterator, List, Union
|
||||
|
||||
class NamedTemporaryFileResult(BinaryIO):
|
||||
@property
|
||||
|
@ -54,36 +54,6 @@ def check_path_owner(path):
|
|||
return False # assume we don't own the path
|
||||
|
||||
|
||||
def copy2_fixed(src, dest):
|
||||
# type: (str, str) -> None
|
||||
"""Wrap shutil.copy2() but map errors copying socket files to
|
||||
SpecialFileError as expected.
|
||||
|
||||
See also https://bugs.python.org/issue37700.
|
||||
"""
|
||||
try:
|
||||
shutil.copy2(src, dest)
|
||||
except (OSError, IOError):
|
||||
for f in [src, dest]:
|
||||
try:
|
||||
is_socket_file = is_socket(f)
|
||||
except OSError:
|
||||
# An error has already occurred. Another error here is not
|
||||
# a problem and we can ignore it.
|
||||
pass
|
||||
else:
|
||||
if is_socket_file:
|
||||
raise shutil.SpecialFileError(
|
||||
"`{f}` is a socket".format(**locals()))
|
||||
|
||||
raise
|
||||
|
||||
|
||||
def is_socket(path):
|
||||
# type: (str) -> bool
|
||||
return stat.S_ISSOCK(os.lstat(path).st_mode)
|
||||
|
||||
|
||||
@contextmanager
|
||||
def adjacent_tmp_file(path, **kwargs):
|
||||
# type: (str, **Any) -> Iterator[NamedTemporaryFileResult]
|
||||
|
@ -176,3 +146,42 @@ def _test_writable_dir_win(path):
|
|||
raise EnvironmentError(
|
||||
'Unexpected condition testing for writable directory'
|
||||
)
|
||||
|
||||
|
||||
def find_files(path, pattern):
|
||||
# type: (str, str) -> List[str]
|
||||
"""Returns a list of absolute paths of files beneath path, recursively,
|
||||
with filenames which match the UNIX-style shell glob pattern."""
|
||||
result = [] # type: List[str]
|
||||
for root, dirs, files in os.walk(path):
|
||||
matches = fnmatch.filter(files, pattern)
|
||||
result.extend(os.path.join(root, f) for f in matches)
|
||||
return result
|
||||
|
||||
|
||||
def file_size(path):
|
||||
# type: (str) -> Union[int, float]
|
||||
# If it's a symlink, return 0.
|
||||
if os.path.islink(path):
|
||||
return 0
|
||||
return os.path.getsize(path)
|
||||
|
||||
|
||||
def format_file_size(path):
|
||||
# type: (str) -> str
|
||||
return format_size(file_size(path))
|
||||
|
||||
|
||||
def directory_size(path):
|
||||
# type: (str) -> Union[int, float]
|
||||
size = 0.0
|
||||
for root, _dirs, files in os.walk(path):
|
||||
for filename in files:
|
||||
file_path = os.path.join(root, filename)
|
||||
size += file_size(file_path)
|
||||
return size
|
||||
|
||||
|
||||
def format_directory_size(path):
|
||||
# type: (str) -> str
|
||||
return format_size(directory_size(path))
|
||||
|
|
|
@ -52,7 +52,6 @@ else:
|
|||
|
||||
|
||||
_log_state = threading.local()
|
||||
_log_state.indentation = 0
|
||||
subprocess_logger = getLogger('pip.subprocessor')
|
||||
|
||||
|
||||
|
@ -104,6 +103,8 @@ def indent_log(num=2):
|
|||
A context manager which will cause the log output to be indented for any
|
||||
log messages emitted inside it.
|
||||
"""
|
||||
# For thread-safety
|
||||
_log_state.indentation = get_indentation()
|
||||
_log_state.indentation += num
|
||||
try:
|
||||
yield
|
||||
|
|
|
@ -3,6 +3,7 @@ __all__ = [
|
|||
"AbstractProvider",
|
||||
"AbstractResolver",
|
||||
"BaseReporter",
|
||||
"InconsistentCandidate",
|
||||
"Resolver",
|
||||
"RequirementsConflicted",
|
||||
"ResolutionError",
|
||||
|
@ -10,12 +11,13 @@ __all__ = [
|
|||
"ResolutionTooDeep",
|
||||
]
|
||||
|
||||
__version__ = "0.2.3.dev0"
|
||||
__version__ = "0.3.0"
|
||||
|
||||
|
||||
from .providers import AbstractProvider, AbstractResolver
|
||||
from .reporters import BaseReporter
|
||||
from .resolvers import (
|
||||
InconsistentCandidate,
|
||||
RequirementsConflicted,
|
||||
Resolver,
|
||||
ResolutionError,
|
||||
|
|
|
@ -22,3 +22,15 @@ class BaseReporter(object):
|
|||
def ending(self, state):
|
||||
"""Called before the resolution ends successfully.
|
||||
"""
|
||||
|
||||
def adding_requirement(self, requirement):
|
||||
"""Called when the resolver adds a new requirement into the resolve criteria.
|
||||
"""
|
||||
|
||||
def backtracking(self, candidate):
|
||||
"""Called when the resolver rejects a candidate during backtracking.
|
||||
"""
|
||||
|
||||
def pinning(self, candidate):
|
||||
"""Called when adding a candidate to the potential solution.
|
||||
"""
|
||||
|
|
|
@ -22,6 +22,24 @@ class RequirementsConflicted(ResolverException):
|
|||
super(RequirementsConflicted, self).__init__(criterion)
|
||||
self.criterion = criterion
|
||||
|
||||
def __str__(self):
|
||||
return "Requirements conflict: {}".format(
|
||||
", ".join(repr(r) for r in self.criterion.iter_requirement()),
|
||||
)
|
||||
|
||||
|
||||
class InconsistentCandidate(ResolverException):
|
||||
def __init__(self, candidate, criterion):
|
||||
super(InconsistentCandidate, self).__init__(candidate, criterion)
|
||||
self.candidate = candidate
|
||||
self.criterion = criterion
|
||||
|
||||
def __str__(self):
|
||||
return "Provided candidate {!r} does not satisfy {}".format(
|
||||
self.candidate,
|
||||
", ".join(repr(r) for r in self.criterion.iter_requirement()),
|
||||
)
|
||||
|
||||
|
||||
class Criterion(object):
|
||||
"""Representation of possible resolution results of a package.
|
||||
|
@ -48,6 +66,13 @@ class Criterion(object):
|
|||
self.information = information
|
||||
self.incompatibilities = incompatibilities
|
||||
|
||||
def __repr__(self):
|
||||
requirements = ", ".join(
|
||||
"{!r} from {!r}".format(req, parent)
|
||||
for req, parent in self.information
|
||||
)
|
||||
return "<Criterion {}>".format(requirements)
|
||||
|
||||
@classmethod
|
||||
def from_requirement(cls, provider, requirement, parent):
|
||||
"""Build an instance from a requirement.
|
||||
|
@ -85,13 +110,15 @@ class Criterion(object):
|
|||
|
||||
def excluded_of(self, candidate):
|
||||
"""Build a new instance from this, but excluding specified candidate.
|
||||
|
||||
Returns the new instance, or None if we still have no valid candidates.
|
||||
"""
|
||||
incompats = list(self.incompatibilities)
|
||||
incompats.append(candidate)
|
||||
candidates = [c for c in self.candidates if c != candidate]
|
||||
criterion = type(self)(candidates, list(self.information), incompats)
|
||||
if not candidates:
|
||||
raise RequirementsConflicted(criterion)
|
||||
return None
|
||||
criterion = type(self)(candidates, list(self.information), incompats)
|
||||
return criterion
|
||||
|
||||
|
||||
|
@ -100,9 +127,10 @@ class ResolutionError(ResolverException):
|
|||
|
||||
|
||||
class ResolutionImpossible(ResolutionError):
|
||||
def __init__(self, requirements):
|
||||
super(ResolutionImpossible, self).__init__(requirements)
|
||||
self.requirements = requirements
|
||||
def __init__(self, causes):
|
||||
super(ResolutionImpossible, self).__init__(causes)
|
||||
# causes is a list of RequirementInformation objects
|
||||
self.causes = causes
|
||||
|
||||
|
||||
class ResolutionTooDeep(ResolutionError):
|
||||
|
@ -151,6 +179,7 @@ class Resolution(object):
|
|||
self._states.append(state)
|
||||
|
||||
def _merge_into_criterion(self, requirement, parent):
|
||||
self._r.adding_requirement(requirement)
|
||||
name = self._p.identify(requirement)
|
||||
try:
|
||||
crit = self.state.criteria[name]
|
||||
|
@ -195,11 +224,21 @@ class Resolution(object):
|
|||
except RequirementsConflicted as e:
|
||||
causes.append(e.criterion)
|
||||
continue
|
||||
|
||||
# Put newly-pinned candidate at the end. This is essential because
|
||||
# backtracking looks at this mapping to get the last pin.
|
||||
self._r.pinning(candidate)
|
||||
self.state.mapping.pop(name, None)
|
||||
self.state.mapping[name] = candidate
|
||||
self.state.criteria.update(criteria)
|
||||
|
||||
# Check the newly-pinned candidate actually works. This should
|
||||
# always pass under normal circumstances, but in the case of a
|
||||
# faulty provider, we will raise an error to notify the implementer
|
||||
# to fix find_matches() and/or is_satisfied_by().
|
||||
if not self._is_current_pin_satisfying(name, criterion):
|
||||
raise InconsistentCandidate(candidate, criterion)
|
||||
|
||||
return []
|
||||
|
||||
# All candidates tried, nothing works. This criterion is a dead
|
||||
|
@ -217,12 +256,12 @@ class Resolution(object):
|
|||
|
||||
# Retract the last candidate pin, and create a new (b).
|
||||
name, candidate = self._states.pop().mapping.popitem()
|
||||
self._r.backtracking(candidate)
|
||||
self._push_new_state()
|
||||
|
||||
try:
|
||||
# Mark the retracted candidate as incompatible.
|
||||
criterion = self.state.criteria[name].excluded_of(candidate)
|
||||
except RequirementsConflicted:
|
||||
# Mark the retracted candidate as incompatible.
|
||||
criterion = self.state.criteria[name].excluded_of(candidate)
|
||||
if criterion is None:
|
||||
# This state still does not work. Try the still previous state.
|
||||
continue
|
||||
self.state.criteria[name] = criterion
|
||||
|
@ -240,8 +279,7 @@ class Resolution(object):
|
|||
try:
|
||||
name, crit = self._merge_into_criterion(r, parent=None)
|
||||
except RequirementsConflicted as e:
|
||||
# If initial requirements conflict, nothing would ever work.
|
||||
raise ResolutionImpossible(e.requirements + [r])
|
||||
raise ResolutionImpossible(e.criterion.information)
|
||||
self.state.criteria[name] = crit
|
||||
|
||||
self._r.starting()
|
||||
|
@ -275,12 +313,10 @@ class Resolution(object):
|
|||
if failure_causes:
|
||||
result = self._backtrack()
|
||||
if not result:
|
||||
requirements = [
|
||||
requirement
|
||||
for crit in failure_causes
|
||||
for requirement in crit.iter_requirement()
|
||||
causes = [
|
||||
i for crit in failure_causes for i in crit.information
|
||||
]
|
||||
raise ResolutionImpossible(requirements)
|
||||
raise ResolutionImpossible(causes)
|
||||
|
||||
self._r.ending_round(round_index, curr)
|
||||
|
||||
|
@ -365,7 +401,9 @@ class Resolver(AbstractResolver):
|
|||
The following exceptions may be raised if a resolution cannot be found:
|
||||
|
||||
* `ResolutionImpossible`: A resolution cannot be found for the given
|
||||
combination of requirements.
|
||||
combination of requirements. The `causes` attribute of the
|
||||
exception is a list of (requirement, parent), giving the
|
||||
requirements that could not be satisfied.
|
||||
* `ResolutionTooDeep`: The dependency tree is too deeply nested and
|
||||
the resolver gave up. This is usually caused by a circular
|
||||
dependency, but you can try to resolve this by increasing the
|
||||
|
|
|
@ -17,9 +17,8 @@ requests==2.22.0
|
|||
chardet==3.0.4
|
||||
idna==2.8
|
||||
urllib3==1.25.7
|
||||
resolvelib==0.3.0
|
||||
retrying==1.3.3
|
||||
setuptools==44.0.0
|
||||
six==1.14.0
|
||||
webencodings==0.5.1
|
||||
|
||||
git+https://github.com/sarugaku/resolvelib.git@fbc8bb28d6cff98b2#egg=resolvelib
|
||||
|
|
|
@ -0,0 +1,218 @@
|
|||
import os
|
||||
import shutil
|
||||
from glob import glob
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def cache_dir(script):
|
||||
result = script.run(
|
||||
'python', '-c',
|
||||
'from pip._internal.locations import USER_CACHE_DIR;'
|
||||
'print(USER_CACHE_DIR)'
|
||||
)
|
||||
return result.stdout.strip()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def wheel_cache_dir(cache_dir):
|
||||
return os.path.normcase(os.path.join(cache_dir, 'wheels'))
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def wheel_cache_files(wheel_cache_dir):
|
||||
destination = os.path.join(wheel_cache_dir, 'arbitrary', 'pathname')
|
||||
|
||||
if not os.path.exists(destination):
|
||||
return []
|
||||
|
||||
filenames = glob(os.path.join(destination, '*.whl'))
|
||||
files = []
|
||||
for filename in filenames:
|
||||
files.append(os.path.join(destination, filename))
|
||||
return files
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def populate_wheel_cache(wheel_cache_dir):
|
||||
destination = os.path.join(wheel_cache_dir, 'arbitrary', 'pathname')
|
||||
os.makedirs(destination)
|
||||
|
||||
files = [
|
||||
('yyy-1.2.3', os.path.join(destination, 'yyy-1.2.3-py3-none-any.whl')),
|
||||
('zzz-4.5.6', os.path.join(destination, 'zzz-4.5.6-py3-none-any.whl')),
|
||||
('zzz-4.5.7', os.path.join(destination, 'zzz-4.5.7-py3-none-any.whl')),
|
||||
('zzz-7.8.9', os.path.join(destination, 'zzz-7.8.9-py3-none-any.whl')),
|
||||
]
|
||||
|
||||
for _name, filename in files:
|
||||
with open(filename, 'w'):
|
||||
pass
|
||||
|
||||
return files
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def empty_wheel_cache(wheel_cache_dir):
|
||||
if os.path.exists(wheel_cache_dir):
|
||||
shutil.rmtree(wheel_cache_dir)
|
||||
|
||||
|
||||
def list_matches_wheel(wheel_name, result):
|
||||
"""Returns True if any line in `result`, which should be the output of
|
||||
a `pip cache list` call, matches `wheel_name`.
|
||||
|
||||
E.g., If wheel_name is `foo-1.2.3` it searches for a line starting with
|
||||
`- foo-1.2.3-py3-none-any.whl `."""
|
||||
lines = result.stdout.splitlines()
|
||||
expected = ' - {}-py3-none-any.whl '.format(wheel_name)
|
||||
return any(map(lambda l: l.startswith(expected), lines))
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def remove_matches_wheel(wheel_cache_dir):
|
||||
"""Returns True if any line in `result`, which should be the output of
|
||||
a `pip cache remove`/`pip cache purge` call, matches `wheel_name`.
|
||||
|
||||
E.g., If wheel_name is `foo-1.2.3`, it searches for a line equal to
|
||||
`Removed <wheel cache dir>/arbitrary/pathname/foo-1.2.3-py3-none-any.whl`.
|
||||
"""
|
||||
|
||||
def _remove_matches_wheel(wheel_name, result):
|
||||
lines = result.stdout.splitlines()
|
||||
|
||||
wheel_filename = '{}-py3-none-any.whl'.format(wheel_name)
|
||||
|
||||
# The "/arbitrary/pathname/" bit is an implementation detail of how
|
||||
# the `populate_wheel_cache` fixture is implemented.
|
||||
path = os.path.join(
|
||||
wheel_cache_dir, 'arbitrary', 'pathname', wheel_filename,
|
||||
)
|
||||
expected = 'Removed {}'.format(path)
|
||||
return expected in lines
|
||||
|
||||
return _remove_matches_wheel
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("populate_wheel_cache")
|
||||
def test_cache_info(script, wheel_cache_dir, wheel_cache_files):
|
||||
result = script.pip('cache', 'info')
|
||||
|
||||
assert 'Location: {}'.format(wheel_cache_dir) in result.stdout
|
||||
num_wheels = len(wheel_cache_files)
|
||||
assert 'Number of wheels: {}'.format(num_wheels) in result.stdout
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("populate_wheel_cache")
|
||||
def test_cache_list(script):
|
||||
"""Running `pip cache list` should return exactly what the
|
||||
populate_wheel_cache fixture adds."""
|
||||
result = script.pip('cache', 'list')
|
||||
|
||||
assert list_matches_wheel('yyy-1.2.3', result)
|
||||
assert list_matches_wheel('zzz-4.5.6', result)
|
||||
assert list_matches_wheel('zzz-4.5.7', result)
|
||||
assert list_matches_wheel('zzz-7.8.9', result)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("empty_wheel_cache")
|
||||
def test_cache_list_with_empty_cache(script):
|
||||
"""Running `pip cache list` with an empty cache should print
|
||||
"Nothing cached." and exit."""
|
||||
result = script.pip('cache', 'list')
|
||||
assert result.stdout == "Nothing cached.\n"
|
||||
|
||||
|
||||
def test_cache_list_too_many_args(script):
|
||||
"""Passing `pip cache list` too many arguments should cause an error."""
|
||||
script.pip('cache', 'list', 'aaa', 'bbb',
|
||||
expect_error=True)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("populate_wheel_cache")
|
||||
def test_cache_list_name_match(script):
|
||||
"""Running `pip cache list zzz` should list zzz-4.5.6, zzz-4.5.7,
|
||||
zzz-7.8.9, but nothing else."""
|
||||
result = script.pip('cache', 'list', 'zzz', '--verbose')
|
||||
|
||||
assert not list_matches_wheel('yyy-1.2.3', result)
|
||||
assert list_matches_wheel('zzz-4.5.6', result)
|
||||
assert list_matches_wheel('zzz-4.5.7', result)
|
||||
assert list_matches_wheel('zzz-7.8.9', result)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("populate_wheel_cache")
|
||||
def test_cache_list_name_and_version_match(script):
|
||||
"""Running `pip cache list zzz-4.5.6` should list zzz-4.5.6, but
|
||||
nothing else."""
|
||||
result = script.pip('cache', 'list', 'zzz-4.5.6', '--verbose')
|
||||
|
||||
assert not list_matches_wheel('yyy-1.2.3', result)
|
||||
assert list_matches_wheel('zzz-4.5.6', result)
|
||||
assert not list_matches_wheel('zzz-4.5.7', result)
|
||||
assert not list_matches_wheel('zzz-7.8.9', result)
|
||||
|
||||
|
||||
@pytest.mark.usefixture("populate_wheel_cache")
|
||||
def test_cache_remove_no_arguments(script):
|
||||
"""Running `pip cache remove` with no arguments should cause an error."""
|
||||
script.pip('cache', 'remove', expect_error=True)
|
||||
|
||||
|
||||
def test_cache_remove_too_many_args(script):
|
||||
"""Passing `pip cache remove` too many arguments should cause an error."""
|
||||
script.pip('cache', 'remove', 'aaa', 'bbb',
|
||||
expect_error=True)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("populate_wheel_cache")
|
||||
def test_cache_remove_name_match(script, remove_matches_wheel):
|
||||
"""Running `pip cache remove zzz` should remove zzz-4.5.6 and zzz-7.8.9,
|
||||
but nothing else."""
|
||||
result = script.pip('cache', 'remove', 'zzz', '--verbose')
|
||||
|
||||
assert not remove_matches_wheel('yyy-1.2.3', result)
|
||||
assert remove_matches_wheel('zzz-4.5.6', result)
|
||||
assert remove_matches_wheel('zzz-4.5.7', result)
|
||||
assert remove_matches_wheel('zzz-7.8.9', result)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("populate_wheel_cache")
|
||||
def test_cache_remove_name_and_version_match(script, remove_matches_wheel):
|
||||
"""Running `pip cache remove zzz-4.5.6` should remove zzz-4.5.6, but
|
||||
nothing else."""
|
||||
result = script.pip('cache', 'remove', 'zzz-4.5.6', '--verbose')
|
||||
|
||||
assert not remove_matches_wheel('yyy-1.2.3', result)
|
||||
assert remove_matches_wheel('zzz-4.5.6', result)
|
||||
assert not remove_matches_wheel('zzz-4.5.7', result)
|
||||
assert not remove_matches_wheel('zzz-7.8.9', result)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("populate_wheel_cache")
|
||||
def test_cache_purge(script, remove_matches_wheel):
|
||||
"""Running `pip cache purge` should remove all cached wheels."""
|
||||
result = script.pip('cache', 'purge', '--verbose')
|
||||
|
||||
assert remove_matches_wheel('yyy-1.2.3', result)
|
||||
assert remove_matches_wheel('zzz-4.5.6', result)
|
||||
assert remove_matches_wheel('zzz-4.5.7', result)
|
||||
assert remove_matches_wheel('zzz-7.8.9', result)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("populate_wheel_cache")
|
||||
def test_cache_purge_too_many_args(script, wheel_cache_files):
|
||||
"""Running `pip cache purge aaa` should raise an error and remove no
|
||||
cached wheels."""
|
||||
result = script.pip('cache', 'purge', 'aaa', '--verbose',
|
||||
expect_error=True)
|
||||
assert result.stdout == ''
|
||||
|
||||
# This would be `result.stderr == ...`, but Pip prints deprecation
|
||||
# warnings on Python 2.7, so we check if the _line_ is in stderr.
|
||||
assert 'ERROR: Too many arguments' in result.stderr.splitlines()
|
||||
|
||||
# Make sure nothing was deleted.
|
||||
for filename in wheel_cache_files:
|
||||
assert os.path.exists(filename)
|
|
@ -27,7 +27,9 @@ def test_entrypoints_work(entrypoint, script):
|
|||
)
|
||||
""".format(entrypoint)))
|
||||
|
||||
script.pip("install", "-vvv", str(fake_pkg))
|
||||
# expect_temp=True, because pip install calls setup.py which
|
||||
# in turn creates fake_pkg.egg-info.
|
||||
script.pip("install", "-vvv", str(fake_pkg), expect_temp=True)
|
||||
result = script.pip("-V")
|
||||
result2 = script.run("fake_pip", "-V", allow_stderr_warning=True)
|
||||
assert result.stdout == result2.stdout
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
import pytest
|
||||
|
||||
from pip._internal.commands.debug import create_vendor_txt_map
|
||||
from pip._internal.utils import compatibility_tags
|
||||
|
||||
|
||||
|
@ -14,6 +15,8 @@ from pip._internal.utils import compatibility_tags
|
|||
'REQUESTS_CA_BUNDLE: ',
|
||||
'CURL_CA_BUNDLE: ',
|
||||
'pip._vendor.certifi.where(): ',
|
||||
'pip._vendor.DEBUNDLED: ',
|
||||
'vendored library versions:',
|
||||
|
||||
])
|
||||
def test_debug(script, expected_text):
|
||||
|
@ -27,6 +30,18 @@ def test_debug(script, expected_text):
|
|||
assert expected_text in stdout
|
||||
|
||||
|
||||
def test_debug__library_versions(script):
|
||||
"""
|
||||
Check the library versions normal output.
|
||||
"""
|
||||
args = ['debug']
|
||||
result = script.pip(*args, allow_stderr_warning=True)
|
||||
stdout = result.stdout
|
||||
vendored_versions = create_vendor_txt_map()
|
||||
for name, value in vendored_versions.items():
|
||||
assert '{}=={}'.format(name, value) in stdout
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
'args',
|
||||
[
|
||||
|
|
|
@ -2,7 +2,6 @@ import distutils
|
|||
import glob
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import ssl
|
||||
import sys
|
||||
import textwrap
|
||||
|
@ -29,7 +28,6 @@ from tests.lib import (
|
|||
skip_if_python2,
|
||||
windows_workaround_7667,
|
||||
)
|
||||
from tests.lib.filesystem import make_socket_file
|
||||
from tests.lib.local_repos import local_checkout
|
||||
from tests.lib.path import Path
|
||||
from tests.lib.server import (
|
||||
|
@ -576,30 +574,6 @@ def test_install_from_local_directory_with_symlinks_to_directories(
|
|||
assert egg_info_folder in result.files_created, str(result)
|
||||
|
||||
|
||||
@pytest.mark.skipif("sys.platform == 'win32' or sys.version_info < (3,)")
|
||||
def test_install_from_local_directory_with_socket_file(script, data, tmpdir):
|
||||
"""
|
||||
Test installing from a local directory containing a socket file.
|
||||
"""
|
||||
egg_info_file = (
|
||||
script.site_packages /
|
||||
"FSPkg-0.1.dev0-py{pyversion}.egg-info".format(**globals())
|
||||
)
|
||||
package_folder = script.site_packages / "fspkg"
|
||||
to_copy = data.packages.joinpath("FSPkg")
|
||||
to_install = tmpdir.joinpath("src")
|
||||
|
||||
shutil.copytree(to_copy, to_install)
|
||||
# Socket file, should be ignored.
|
||||
socket_file_path = os.path.join(to_install, "example")
|
||||
make_socket_file(socket_file_path)
|
||||
|
||||
result = script.pip("install", "--verbose", to_install)
|
||||
assert package_folder in result.files_created, str(result.stdout)
|
||||
assert egg_info_file in result.files_created, str(result)
|
||||
assert str(socket_file_path) in result.stderr
|
||||
|
||||
|
||||
def test_install_from_local_directory_with_no_setup_py(script, data):
|
||||
"""
|
||||
Test installing from a local directory with no 'setup.py'.
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
import json
|
||||
import os
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
|
||||
|
@ -263,6 +264,28 @@ def test_new_resolver_requires_python(
|
|||
assert_installed(script, base="0.1.0", dep=dep_version)
|
||||
|
||||
|
||||
def test_new_resolver_requires_python_error(script):
|
||||
create_basic_wheel_for_package(
|
||||
script,
|
||||
"base",
|
||||
"0.1.0",
|
||||
requires_python="<2",
|
||||
)
|
||||
result = script.pip(
|
||||
"install", "--unstable-feature=resolver",
|
||||
"--no-cache-dir", "--no-index",
|
||||
"--find-links", script.scratch_path,
|
||||
"base",
|
||||
expect_error=True,
|
||||
)
|
||||
|
||||
message = (
|
||||
"Package 'base' requires a different Python: "
|
||||
"{}.{}.{} not in '<2'".format(*sys.version_info[:3])
|
||||
)
|
||||
assert message in result.stderr, str(result)
|
||||
|
||||
|
||||
def test_new_resolver_installed(script):
|
||||
create_basic_wheel_for_package(
|
||||
script,
|
||||
|
|
|
@ -271,7 +271,15 @@ def test_uninstall_console_scripts(script):
|
|||
sorted(result.files_created.keys())
|
||||
)
|
||||
result2 = script.pip('uninstall', 'discover', '-y')
|
||||
assert_all_changes(result, result2, [script.venv / 'build', 'cache'])
|
||||
assert_all_changes(
|
||||
result,
|
||||
result2,
|
||||
[
|
||||
script.venv / 'build',
|
||||
'cache',
|
||||
script.scratch / 'discover' / 'discover.egg-info',
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
def test_uninstall_console_scripts_uppercase_name(script):
|
||||
|
|
|
@ -1,48 +0,0 @@
|
|||
"""Helpers for filesystem-dependent tests.
|
||||
"""
|
||||
import os
|
||||
import socket
|
||||
import subprocess
|
||||
import sys
|
||||
from functools import partial
|
||||
from itertools import chain
|
||||
|
||||
from .path import Path
|
||||
|
||||
|
||||
def make_socket_file(path):
|
||||
# Socket paths are limited to 108 characters (sometimes less) so we
|
||||
# chdir before creating it and use a relative path name.
|
||||
cwd = os.getcwd()
|
||||
os.chdir(os.path.dirname(path))
|
||||
try:
|
||||
sock = socket.socket(socket.AF_UNIX)
|
||||
sock.bind(os.path.basename(path))
|
||||
finally:
|
||||
os.chdir(cwd)
|
||||
|
||||
|
||||
def make_unreadable_file(path):
|
||||
Path(path).touch()
|
||||
os.chmod(path, 0o000)
|
||||
if sys.platform == "win32":
|
||||
# Once we drop PY2 we can use `os.getlogin()` instead.
|
||||
username = os.environ["USERNAME"]
|
||||
# Remove "Read Data/List Directory" permission for current user, but
|
||||
# leave everything else.
|
||||
args = ["icacls", path, "/deny", username + ":(RD)"]
|
||||
subprocess.check_call(args)
|
||||
|
||||
|
||||
def get_filelist(base):
|
||||
def join(dirpath, dirnames, filenames):
|
||||
relative_dirpath = os.path.relpath(dirpath, base)
|
||||
join_dirpath = partial(os.path.join, relative_dirpath)
|
||||
return chain(
|
||||
(join_dirpath(p) for p in dirnames),
|
||||
(join_dirpath(p) for p in filenames),
|
||||
)
|
||||
|
||||
return set(chain.from_iterable(
|
||||
join(*dirinfo) for dirinfo in os.walk(base)
|
||||
))
|
|
@ -2,6 +2,7 @@ import errno
|
|||
import logging
|
||||
import os
|
||||
import time
|
||||
from threading import Thread
|
||||
|
||||
import pytest
|
||||
from mock import patch
|
||||
|
@ -11,6 +12,7 @@ from pip._internal.utils.logging import (
|
|||
BrokenStdoutLoggingError,
|
||||
ColorizedStreamHandler,
|
||||
IndentingFormatter,
|
||||
indent_log,
|
||||
)
|
||||
from pip._internal.utils.misc import captured_stderr, captured_stdout
|
||||
|
||||
|
@ -108,6 +110,39 @@ class TestIndentingFormatter(object):
|
|||
f = IndentingFormatter(fmt="%(message)s")
|
||||
assert f.format(record) == expected
|
||||
|
||||
def test_thread_safety_base(self):
|
||||
record = self.make_record(
|
||||
'DEPRECATION: hello\nworld', level_name='WARNING',
|
||||
)
|
||||
f = IndentingFormatter(fmt="%(message)s")
|
||||
results = []
|
||||
|
||||
def thread_function():
|
||||
results.append(f.format(record))
|
||||
|
||||
thread_function()
|
||||
thread = Thread(target=thread_function)
|
||||
thread.start()
|
||||
thread.join()
|
||||
assert results[0] == results[1]
|
||||
|
||||
def test_thread_safety_indent_log(self):
|
||||
record = self.make_record(
|
||||
'DEPRECATION: hello\nworld', level_name='WARNING',
|
||||
)
|
||||
f = IndentingFormatter(fmt="%(message)s")
|
||||
results = []
|
||||
|
||||
def thread_function():
|
||||
with indent_log():
|
||||
results.append(f.format(record))
|
||||
|
||||
thread_function()
|
||||
thread = Thread(target=thread_function)
|
||||
thread.start()
|
||||
thread.join()
|
||||
assert results[0] == results[1]
|
||||
|
||||
|
||||
class TestColorizedStreamHandler(object):
|
||||
|
||||
|
|
|
@ -72,7 +72,7 @@ class TestPipSession:
|
|||
|
||||
assert not hasattr(session.adapters["http://"], "cache")
|
||||
|
||||
def test_insecure_host_adapter(self, tmpdir):
|
||||
def test_trusted_hosts_adapter(self, tmpdir):
|
||||
session = PipSession(
|
||||
cache=tmpdir.joinpath("test-cache"),
|
||||
trusted_hosts=["example.com"],
|
||||
|
@ -81,14 +81,14 @@ class TestPipSession:
|
|||
assert "https://example.com/" in session.adapters
|
||||
# Check that the "port wildcard" is present.
|
||||
assert "https://example.com:" in session.adapters
|
||||
# Check that the cache isn't enabled.
|
||||
assert not hasattr(session.adapters["https://example.com/"], "cache")
|
||||
# Check that the cache is enabled.
|
||||
assert hasattr(session.adapters["https://example.com/"], "cache")
|
||||
|
||||
def test_add_trusted_host(self):
|
||||
# Leave a gap to test how the ordering is affected.
|
||||
trusted_hosts = ['host1', 'host3']
|
||||
session = PipSession(trusted_hosts=trusted_hosts)
|
||||
insecure_adapter = session._insecure_adapter
|
||||
trusted_host_adapter = session._trusted_host_adapter
|
||||
prefix2 = 'https://host2/'
|
||||
prefix3 = 'https://host3/'
|
||||
prefix3_wildcard = 'https://host3:'
|
||||
|
@ -97,8 +97,8 @@ class TestPipSession:
|
|||
assert session.pip_trusted_origins == [
|
||||
('host1', None), ('host3', None)
|
||||
]
|
||||
assert session.adapters[prefix3] is insecure_adapter
|
||||
assert session.adapters[prefix3_wildcard] is insecure_adapter
|
||||
assert session.adapters[prefix3] is trusted_host_adapter
|
||||
assert session.adapters[prefix3_wildcard] is trusted_host_adapter
|
||||
|
||||
assert prefix2 not in session.adapters
|
||||
|
||||
|
@ -108,8 +108,8 @@ class TestPipSession:
|
|||
('host1', None), ('host3', None), ('host2', None)
|
||||
]
|
||||
# Check that prefix3 is still present.
|
||||
assert session.adapters[prefix3] is insecure_adapter
|
||||
assert session.adapters[prefix2] is insecure_adapter
|
||||
assert session.adapters[prefix3] is trusted_host_adapter
|
||||
assert session.adapters[prefix2] is trusted_host_adapter
|
||||
|
||||
# Test that adding the same host doesn't create a duplicate.
|
||||
session.add_trusted_host('host3')
|
||||
|
@ -123,7 +123,7 @@ class TestPipSession:
|
|||
('host1', None), ('host3', None),
|
||||
('host2', None), ('host4', 8080)
|
||||
]
|
||||
assert session.adapters[prefix4] is insecure_adapter
|
||||
assert session.adapters[prefix4] is trusted_host_adapter
|
||||
|
||||
def test_add_trusted_host__logging(self, caplog):
|
||||
"""
|
||||
|
|
|
@ -10,18 +10,9 @@ from pip._internal.exceptions import HashMismatch
|
|||
from pip._internal.models.link import Link
|
||||
from pip._internal.network.download import Downloader
|
||||
from pip._internal.network.session import PipSession
|
||||
from pip._internal.operations.prepare import (
|
||||
_copy_source_tree,
|
||||
_download_http_url,
|
||||
unpack_url,
|
||||
)
|
||||
from pip._internal.operations.prepare import _download_http_url, unpack_url
|
||||
from pip._internal.utils.hashes import Hashes
|
||||
from pip._internal.utils.urls import path_to_url
|
||||
from tests.lib.filesystem import (
|
||||
get_filelist,
|
||||
make_socket_file,
|
||||
make_unreadable_file,
|
||||
)
|
||||
from tests.lib.path import Path
|
||||
from tests.lib.requests_mocks import MockResponse
|
||||
|
||||
|
@ -101,76 +92,6 @@ def clean_project(tmpdir_factory, data):
|
|||
return new_project_dir
|
||||
|
||||
|
||||
def test_copy_source_tree(clean_project, tmpdir):
|
||||
target = tmpdir.joinpath("target")
|
||||
expected_files = get_filelist(clean_project)
|
||||
assert len(expected_files) == 3
|
||||
|
||||
_copy_source_tree(clean_project, target)
|
||||
|
||||
copied_files = get_filelist(target)
|
||||
assert expected_files == copied_files
|
||||
|
||||
|
||||
@pytest.mark.skipif("sys.platform == 'win32' or sys.version_info < (3,)")
|
||||
def test_copy_source_tree_with_socket(clean_project, tmpdir, caplog):
|
||||
target = tmpdir.joinpath("target")
|
||||
expected_files = get_filelist(clean_project)
|
||||
socket_path = str(clean_project.joinpath("aaa"))
|
||||
make_socket_file(socket_path)
|
||||
|
||||
_copy_source_tree(clean_project, target)
|
||||
|
||||
copied_files = get_filelist(target)
|
||||
assert expected_files == copied_files
|
||||
|
||||
# Warning should have been logged.
|
||||
assert len(caplog.records) == 1
|
||||
record = caplog.records[0]
|
||||
assert record.levelname == 'WARNING'
|
||||
assert socket_path in record.message
|
||||
|
||||
|
||||
@pytest.mark.skipif("sys.platform == 'win32' or sys.version_info < (3,)")
|
||||
def test_copy_source_tree_with_socket_fails_with_no_socket_error(
|
||||
clean_project, tmpdir
|
||||
):
|
||||
target = tmpdir.joinpath("target")
|
||||
expected_files = get_filelist(clean_project)
|
||||
make_socket_file(clean_project.joinpath("aaa"))
|
||||
unreadable_file = clean_project.joinpath("bbb")
|
||||
make_unreadable_file(unreadable_file)
|
||||
|
||||
with pytest.raises(shutil.Error) as e:
|
||||
_copy_source_tree(clean_project, target)
|
||||
|
||||
errored_files = [err[0] for err in e.value.args[0]]
|
||||
assert len(errored_files) == 1
|
||||
assert unreadable_file in errored_files
|
||||
|
||||
copied_files = get_filelist(target)
|
||||
# All files without errors should have been copied.
|
||||
assert expected_files == copied_files
|
||||
|
||||
|
||||
def test_copy_source_tree_with_unreadable_dir_fails(clean_project, tmpdir):
|
||||
target = tmpdir.joinpath("target")
|
||||
expected_files = get_filelist(clean_project)
|
||||
unreadable_file = clean_project.joinpath("bbb")
|
||||
make_unreadable_file(unreadable_file)
|
||||
|
||||
with pytest.raises(shutil.Error) as e:
|
||||
_copy_source_tree(clean_project, target)
|
||||
|
||||
errored_files = [err[0] for err in e.value.args[0]]
|
||||
assert len(errored_files) == 1
|
||||
assert unreadable_file in errored_files
|
||||
|
||||
copied_files = get_filelist(target)
|
||||
# All files without errors should have been copied.
|
||||
assert expected_files == copied_files
|
||||
|
||||
|
||||
class Test_unpack_url(object):
|
||||
|
||||
def prep(self, tmpdir, data):
|
||||
|
@ -214,40 +135,5 @@ class Test_unpack_url(object):
|
|||
unpack_url(dist_url, self.build_dir,
|
||||
downloader=self.no_downloader,
|
||||
download_dir=self.download_dir)
|
||||
assert os.path.isdir(os.path.join(self.build_dir, 'fspkg'))
|
||||
|
||||
|
||||
@pytest.mark.parametrize('exclude_dir', [
|
||||
'.nox',
|
||||
'.tox'
|
||||
])
|
||||
def test_unpack_url_excludes_expected_dirs(tmpdir, exclude_dir):
|
||||
src_dir = tmpdir / 'src'
|
||||
dst_dir = tmpdir / 'dst'
|
||||
src_included_file = src_dir.joinpath('file.txt')
|
||||
src_excluded_dir = src_dir.joinpath(exclude_dir)
|
||||
src_excluded_file = src_dir.joinpath(exclude_dir, 'file.txt')
|
||||
src_included_dir = src_dir.joinpath('subdir', exclude_dir)
|
||||
|
||||
# set up source directory
|
||||
src_excluded_dir.mkdir(parents=True)
|
||||
src_included_dir.mkdir(parents=True)
|
||||
src_included_file.touch()
|
||||
src_excluded_file.touch()
|
||||
|
||||
dst_included_file = dst_dir.joinpath('file.txt')
|
||||
dst_excluded_dir = dst_dir.joinpath(exclude_dir)
|
||||
dst_excluded_file = dst_dir.joinpath(exclude_dir, 'file.txt')
|
||||
dst_included_dir = dst_dir.joinpath('subdir', exclude_dir)
|
||||
|
||||
src_link = Link(path_to_url(src_dir))
|
||||
unpack_url(
|
||||
src_link,
|
||||
dst_dir,
|
||||
Mock(side_effect=AssertionError),
|
||||
download_dir=None
|
||||
)
|
||||
assert not os.path.isdir(dst_excluded_dir)
|
||||
assert not os.path.isfile(dst_excluded_file)
|
||||
assert os.path.isfile(dst_included_file)
|
||||
assert os.path.isdir(dst_included_dir)
|
||||
# test that nothing was copied to build_dir since we build in place
|
||||
assert not os.path.exists(os.path.join(self.build_dir, 'fspkg'))
|
||||
|
|
|
@ -401,10 +401,13 @@ class TestProcessLine(object):
|
|||
)
|
||||
assert list(finder.trusted_hosts) == ['host1', 'host2:8080']
|
||||
session = finder._link_collector.session
|
||||
assert session.adapters['https://host1/'] is session._insecure_adapter
|
||||
assert (
|
||||
session.adapters['https://host1/']
|
||||
is session._trusted_host_adapter
|
||||
)
|
||||
assert (
|
||||
session.adapters['https://host2:8080/']
|
||||
is session._insecure_adapter
|
||||
is session._trusted_host_adapter
|
||||
)
|
||||
|
||||
# Test the log message.
|
||||
|
|
|
@ -1,61 +0,0 @@
|
|||
import os
|
||||
import shutil
|
||||
|
||||
import pytest
|
||||
|
||||
from pip._internal.utils.filesystem import copy2_fixed, is_socket
|
||||
from tests.lib.filesystem import make_socket_file, make_unreadable_file
|
||||
from tests.lib.path import Path
|
||||
|
||||
|
||||
def make_file(path):
|
||||
Path(path).touch()
|
||||
|
||||
|
||||
def make_valid_symlink(path):
|
||||
target = path + "1"
|
||||
make_file(target)
|
||||
os.symlink(target, path)
|
||||
|
||||
|
||||
def make_broken_symlink(path):
|
||||
os.symlink("foo", path)
|
||||
|
||||
|
||||
def make_dir(path):
|
||||
os.mkdir(path)
|
||||
|
||||
|
||||
skip_on_windows = pytest.mark.skipif("sys.platform == 'win32'")
|
||||
|
||||
|
||||
@skip_on_windows
|
||||
@pytest.mark.parametrize("create,result", [
|
||||
(make_socket_file, True),
|
||||
(make_file, False),
|
||||
(make_valid_symlink, False),
|
||||
(make_broken_symlink, False),
|
||||
(make_dir, False),
|
||||
])
|
||||
def test_is_socket(create, result, tmpdir):
|
||||
target = tmpdir.joinpath("target")
|
||||
create(target)
|
||||
assert os.path.lexists(target)
|
||||
assert is_socket(target) == result
|
||||
|
||||
|
||||
@pytest.mark.parametrize("create,error_type", [
|
||||
pytest.param(
|
||||
make_socket_file, shutil.SpecialFileError, marks=skip_on_windows
|
||||
),
|
||||
(make_unreadable_file, OSError),
|
||||
])
|
||||
def test_copy2_fixed_raises_appropriate_errors(create, error_type, tmpdir):
|
||||
src = tmpdir.joinpath("src")
|
||||
create(src)
|
||||
dest = tmpdir.joinpath("dest")
|
||||
|
||||
with pytest.raises(error_type):
|
||||
copy2_fixed(src, dest)
|
||||
|
||||
assert not dest.exists()
|
Loading…
Reference in New Issue