
Merge branch 'master' into refactor_legacy_install

Paul Moore 2020-02-07 14:29:46 +00:00
commit 1aa27c4595
20 changed files with 200 additions and 264 deletions


@ -15,7 +15,7 @@ from pip._vendor.packaging.utils import canonicalize_name
from pip._internal.exceptions import InvalidWheelFilename
from pip._internal.models.link import Link
from pip._internal.models.wheel import Wheel
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.urls import path_to_url
@ -171,10 +171,6 @@ class Cache(object):
"""
raise NotImplementedError()
def cleanup(self):
# type: () -> None
pass
class SimpleWheelCache(Cache):
"""A cache of wheels for future installs.
@ -264,16 +260,15 @@ class EphemWheelCache(SimpleWheelCache):
def __init__(self, format_control):
# type: (FormatControl) -> None
self._temp_dir = TempDirectory(kind="ephem-wheel-cache")
self._temp_dir = TempDirectory(
kind=tempdir_kinds.EPHEM_WHEEL_CACHE,
globally_managed=True,
)
super(EphemWheelCache, self).__init__(
self._temp_dir.path, format_control
)
def cleanup(self):
# type: () -> None
self._temp_dir.cleanup()
class WheelCache(Cache):
"""Wraps EphemWheelCache and SimpleWheelCache into a single Cache
@ -322,8 +317,3 @@ class WheelCache(Cache):
package_name=package_name,
supported_tags=supported_tags,
)
def cleanup(self):
# type: () -> None
self._wheel_cache.cleanup()
self._ephem_cache.cleanup()
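
A note on this hunk: the ephemeral wheel cache directory is now registered with pip's globally-managed TempDirectory machinery (globally_managed=True), so the explicit cleanup() methods on the Cache classes go away. A minimal sketch of the resulting usage, assuming it runs inside pip's internal global_tempdir_manager() context (which the pip CLI sets up); the FormatControl arguments are illustrative only:

from pip._internal.cache import EphemWheelCache
from pip._internal.models.format_control import FormatControl
from pip._internal.utils.temp_dir import global_tempdir_manager

with global_tempdir_manager():
    cache = EphemWheelCache(FormatControl(set(), set()))
    print(cache.cache_dir)  # lives under an "ephem-wheel-cache" temp dir
# the directory is removed when the manager exits; no cache.cleanup() call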


@ -25,6 +25,7 @@ from pip._internal.req.constructors import (
install_req_from_req_string,
)
from pip._internal.req.req_file import parse_requirements
from pip._internal.req.req_set import RequirementSet
from pip._internal.self_outdated_check import (
make_link_collector,
pip_self_version_check,
@ -38,7 +39,7 @@ if MYPY_CHECK_RUNNING:
from pip._internal.cache import WheelCache
from pip._internal.models.target_python import TargetPython
from pip._internal.req.req_set import RequirementSet
from pip._internal.req.req_install import InstallRequirement
from pip._internal.req.req_tracker import RequirementTracker
from pip._internal.utils.temp_dir import (
TempDirectory,
@ -156,7 +157,11 @@ class IndexGroupCommand(Command, SessionCommandMixin):
pip_self_version_check(session, options)
KEEPABLE_TEMPDIR_TYPES = [tempdir_kinds.BUILD_ENV, tempdir_kinds.REQ_BUILD]
KEEPABLE_TEMPDIR_TYPES = [
tempdir_kinds.BUILD_ENV,
tempdir_kinds.EPHEM_WHEEL_CACHE,
tempdir_kinds.REQ_BUILD,
]
def with_cleanup(func):
@ -265,19 +270,22 @@ class RequirementCommand(IndexGroupCommand):
py_version_info=py_version_info,
)
def populate_requirement_set(
def get_requirements(
self,
requirement_set, # type: RequirementSet
args, # type: List[str]
options, # type: Values
finder, # type: PackageFinder
session, # type: PipSession
wheel_cache, # type: Optional[WheelCache]
check_supported_wheels=True, # type: bool
):
# type: (...) -> None
# type: (...) -> List[InstallRequirement]
"""
Marshal cmd line args into a requirement set.
Parse command-line arguments into the corresponding requirements.
"""
requirement_set = RequirementSet(
check_supported_wheels=check_supported_wheels
)
for filename in options.constraints:
for req_to_add in parse_requirements(
filename,
@ -316,10 +324,7 @@ class RequirementCommand(IndexGroupCommand):
requirement_set.add_requirement(req_to_add)
# If any requirement has hash options, enable hash checking.
requirements = (
requirement_set.unnamed_requirements +
list(requirement_set.requirements.values())
)
requirements = requirement_set.all_requirements
if any(req.has_hash_options for req in requirements):
options.require_hashes = True
@ -335,6 +340,8 @@ class RequirementCommand(IndexGroupCommand):
'You must give at least one requirement to %(name)s '
'(see "pip help %(name)s")' % opts)
return requirements
@staticmethod
def trace_basic_info(finder):
# type: (PackageFinder) -> None
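
Read together with the resolver change further down, this hunk changes the command-side shape: commands get a plain list of InstallRequirement objects from get_requirements() and let the resolver build and return the RequirementSet. A rough sketch of the new flow, where resolve_from_args is a hypothetical helper and command, finder, session and resolver stand for the objects a RequirementCommand already has in run():

def resolve_from_args(command, args, options, finder, session, resolver):
    """Hypothetical helper showing the new command-side flow."""
    # get_requirements() replaces populate_requirement_set() and returns a
    # List[InstallRequirement] instead of filling a caller-owned set
    reqs = command.get_requirements(
        args, options, finder, session, wheel_cache=None,
    )
    # the resolver now constructs and returns the RequirementSet itself
    return resolver.resolve(reqs, check_supported_wheels=True)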


@ -9,7 +9,6 @@ import os
from pip._internal.cli import cmdoptions
from pip._internal.cli.cmdoptions import make_target_python
from pip._internal.cli.req_command import RequirementCommand, with_cleanup
from pip._internal.req import RequirementSet
from pip._internal.req.req_tracker import get_requirement_tracker
from pip._internal.utils.misc import ensure_dir, normalize_path, write_output
from pip._internal.utils.temp_dir import TempDirectory
@ -102,10 +101,7 @@ class DownloadCommand(RequirementCommand):
with get_requirement_tracker() as req_tracker, TempDirectory(
options.build_dir, delete=build_delete, kind="download"
) as directory:
requirement_set = RequirementSet()
self.populate_requirement_set(
requirement_set,
reqs = self.get_requirements(
args,
options,
finder,
@ -132,7 +128,9 @@ class DownloadCommand(RequirementCommand):
self.trace_basic_info(finder)
resolver.resolve(requirement_set)
requirement_set = resolver.resolve(
reqs, check_supported_wheels=True
)
downloaded = ' '.join([
req.name for req in requirement_set.successfully_downloaded
@ -140,8 +138,4 @@ class DownloadCommand(RequirementCommand):
if downloaded:
write_output('Successfully downloaded %s', downloaded)
# Clean up
if not options.no_clean:
requirement_set.cleanup_files()
return requirement_set


@ -96,8 +96,5 @@ class FreezeCommand(Command):
exclude_editable=options.exclude_editable,
)
try:
for line in freeze(**freeze_kwargs):
sys.stdout.write(line + '\n')
finally:
wheel_cache.cleanup()
for line in freeze(**freeze_kwargs):
sys.stdout.write(line + '\n')


@ -30,7 +30,7 @@ from pip._internal.exceptions import (
)
from pip._internal.locations import distutils_scheme
from pip._internal.operations.check import check_install_conflicts
from pip._internal.req import RequirementSet, install_given_reqs
from pip._internal.req import install_given_reqs
from pip._internal.req.req_tracker import get_requirement_tracker
from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.distutils_args import parse_distutils_args
@ -291,18 +291,14 @@ class InstallCommand(RequirementCommand):
with get_requirement_tracker() as req_tracker, TempDirectory(
options.build_dir, delete=build_delete, kind="install"
) as directory:
requirement_set = RequirementSet(
check_supported_wheels=not options.target_dir,
)
try:
self.populate_requirement_set(
requirement_set, args, options, finder, session,
wheel_cache
reqs = self.get_requirements(
args, options, finder, session,
wheel_cache, check_supported_wheels=not options.target_dir,
)
warn_deprecated_install_options(
requirement_set, options.install_options
reqs, options.install_options
)
preparer = self.make_requirement_preparer(
@ -328,7 +324,9 @@ class InstallCommand(RequirementCommand):
self.trace_basic_info(finder)
resolver.resolve(requirement_set)
requirement_set = resolver.resolve(
reqs, check_supported_wheels=not options.target_dir
)
try:
pip_req = requirement_set.get_requirement("pip")
@ -443,11 +441,6 @@ class InstallCommand(RequirementCommand):
except PreviousBuildDirError:
options.no_clean = True
raise
finally:
# Clean up
if not options.no_clean:
requirement_set.cleanup_files()
wheel_cache.cleanup()
if options.target_dir:
self._handle_target_dir(
@ -610,8 +603,8 @@ def decide_user_install(
return True
def warn_deprecated_install_options(requirement_set, options):
# type: (RequirementSet, Optional[List[str]]) -> None
def warn_deprecated_install_options(requirements, options):
# type: (List[InstallRequirement], Optional[List[str]]) -> None
"""If any location-changing --install-option arguments were passed for
requirements or on the command-line, then show a deprecation warning.
"""
@ -619,15 +612,10 @@ def warn_deprecated_install_options(requirement_set, options):
# type: (Iterable[str]) -> List[str]
return ["--{}".format(name.replace("_", "-")) for name in option_names]
requirements = (
requirement_set.unnamed_requirements +
list(requirement_set.requirements.values())
)
offenders = []
for requirement in requirements:
install_options = requirement.options.get("install_options", [])
install_options = requirement.install_options
location_options = parse_distutils_args(install_options)
if location_options:
offenders.append(
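
After this change, warn_deprecated_install_options() takes a plain list of InstallRequirement objects rather than a RequirementSet, matching the updated unit test later in this diff. A minimal runnable sketch (the --prefix value is illustrative only):

from pip._internal.commands.install import warn_deprecated_install_options

# an empty requirement list plus one location-changing command-line option
# is enough to trigger the deprecation warning
warn_deprecated_install_options([], ["--prefix=/tmp/example"])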


@ -13,7 +13,6 @@ from pip._internal.cache import WheelCache
from pip._internal.cli import cmdoptions
from pip._internal.cli.req_command import RequirementCommand, with_cleanup
from pip._internal.exceptions import CommandError, PreviousBuildDirError
from pip._internal.req import RequirementSet
from pip._internal.req.req_tracker import get_requirement_tracker
from pip._internal.utils.misc import ensure_dir, normalize_path
from pip._internal.utils.temp_dir import TempDirectory
@ -128,12 +127,9 @@ class WheelCommand(RequirementCommand):
with get_requirement_tracker() as req_tracker, TempDirectory(
options.build_dir, delete=build_delete, kind="wheel"
) as directory:
requirement_set = RequirementSet()
try:
self.populate_requirement_set(
requirement_set, args, options, finder, session,
reqs = self.get_requirements(
args, options, finder, session,
wheel_cache
)
@ -158,7 +154,9 @@ class WheelCommand(RequirementCommand):
self.trace_basic_info(finder)
resolver.resolve(requirement_set)
requirement_set = resolver.resolve(
reqs, check_supported_wheels=True
)
reqs_to_build = [
r for r in requirement_set.requirements.values()
@ -191,7 +189,3 @@ class WheelCommand(RequirementCommand):
except PreviousBuildDirError:
options.no_clean = True
raise
finally:
if not options.no_clean:
requirement_set.cleanup_files()
wheel_cache.cleanup()


@ -28,6 +28,7 @@ from pip._internal.exceptions import (
HashErrors,
UnsupportedPythonVersion,
)
from pip._internal.req.req_set import RequirementSet
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import dist_in_usersite, normalize_version_info
from pip._internal.utils.packaging import (
@ -44,7 +45,6 @@ if MYPY_CHECK_RUNNING:
from pip._internal.index.package_finder import PackageFinder
from pip._internal.operations.prepare import RequirementPreparer
from pip._internal.req.req_install import InstallRequirement
from pip._internal.req.req_set import RequirementSet
InstallRequirementProvider = Callable[
[str, InstallRequirement], InstallRequirement
@ -147,8 +147,8 @@ class Resolver(object):
self._discovered_dependencies = \
defaultdict(list) # type: DiscoveredDependencies
def resolve(self, requirement_set):
# type: (RequirementSet) -> None
def resolve(self, root_reqs, check_supported_wheels):
# type: (List[InstallRequirement], bool) -> RequirementSet
"""Resolve what operations need to be done
As a side-effect of this method, the packages (and their dependencies)
@ -159,12 +159,11 @@ class Resolver(object):
possible to move the preparation to become a step separated from
dependency resolution.
"""
# If any top-level requirement has a hash specified, enter
# hash-checking mode, which requires hashes from all.
root_reqs = (
requirement_set.unnamed_requirements +
list(requirement_set.requirements.values())
requirement_set = RequirementSet(
check_supported_wheels=check_supported_wheels
)
for req in root_reqs:
requirement_set.add_requirement(req)
# Actually prepare the files, and collect any exceptions. Most hash
# exceptions cannot be checked ahead of time, because
@ -182,6 +181,8 @@ class Resolver(object):
if hash_errors:
raise hash_errors
return requirement_set
def _is_upgrade_allowed(self, req):
# type: (InstallRequirement) -> bool
if self.upgrade_strategy == "to-satisfy-only":
@ -327,9 +328,6 @@ class Resolver(object):
req_to_install.prepared = True
# register tmp src for cleanup in case something goes wrong
requirement_set.reqs_to_cleanup.append(req_to_install)
abstract_dist = self._get_abstract_dist_for(req_to_install)
# Parse and return dependencies


@ -28,7 +28,6 @@ from pip._internal.exceptions import (
from pip._internal.utils.filesystem import copy2_fixed
from pip._internal.utils.hashes import MissingHashes
from pip._internal.utils.logging import indent_log
from pip._internal.utils.marker_files import write_delete_marker_file
from pip._internal.utils.misc import (
ask_path_exists,
backup_dir,
@ -134,14 +133,20 @@ def _copy_file(filename, location, link):
logger.info('Saved %s', display_path(download_location))
def unpack_http_url(
class File(object):
def __init__(self, path, content_type):
# type: (str, str) -> None
self.path = path
self.content_type = content_type
def get_http_url(
link, # type: Link
location, # type: str
downloader, # type: Downloader
download_dir=None, # type: Optional[str]
hashes=None, # type: Optional[Hashes]
):
# type: (...) -> str
# type: (...) -> File
temp_dir = TempDirectory(kind="unpack", globally_managed=True)
# If a download dir is specified, is the file already downloaded there?
already_downloaded_path = None
@ -159,11 +164,7 @@ def unpack_http_url(
link, downloader, temp_dir.path, hashes
)
# unpack the archive to the build dir location. even when only
# downloading archives, they have to be unpacked to parse dependencies
unpack_file(from_path, location, content_type)
return from_path
return File(from_path, content_type)
def _copy2_ignoring_special_files(src, dest):
@ -207,23 +208,14 @@ def _copy_source_tree(source, target):
shutil.copytree(source, target, **kwargs)
def unpack_file_url(
def get_file_url(
link, # type: Link
location, # type: str
download_dir=None, # type: Optional[str]
hashes=None # type: Optional[Hashes]
):
# type: (...) -> Optional[str]
"""Unpack link into location.
# type: (...) -> File
"""Get file and optionally check its hash.
"""
link_path = link.file_path
# If it's a url to a local directory
if link.is_existing_dir():
if os.path.isdir(location):
rmtree(location)
_copy_source_tree(link_path, location)
return None
# If a download dir is specified, is the file already there and valid?
already_downloaded_path = None
if download_dir:
@ -234,7 +226,7 @@ def unpack_file_url(
if already_downloaded_path:
from_path = already_downloaded_path
else:
from_path = link_path
from_path = link.file_path
# If --require-hashes is off, `hashes` is either empty, the
# link's embedded hash, or MissingHashes; it is required to
@ -246,11 +238,7 @@ def unpack_file_url(
content_type = mimetypes.guess_type(from_path)[0]
# unpack the archive to the build dir location. even when only downloading
# archives, they have to be unpacked to parse dependencies
unpack_file(from_path, location, content_type)
return from_path
return File(from_path, content_type)
def unpack_url(
@ -260,7 +248,7 @@ def unpack_url(
download_dir=None, # type: Optional[str]
hashes=None, # type: Optional[Hashes]
):
# type: (...) -> Optional[str]
# type: (...) -> Optional[File]
"""Unpack link into location, downloading if required.
:param hashes: A Hashes object, one of whose embedded hashes must match,
@ -273,20 +261,32 @@ def unpack_url(
unpack_vcs_link(link, location)
return None
# If it's a url to a local directory
if link.is_existing_dir():
if os.path.isdir(location):
rmtree(location)
_copy_source_tree(link.file_path, location)
return None
# file urls
elif link.is_file:
return unpack_file_url(link, location, download_dir, hashes=hashes)
if link.is_file:
file = get_file_url(link, download_dir, hashes=hashes)
# http urls
else:
return unpack_http_url(
file = get_http_url(
link,
location,
downloader,
download_dir,
hashes=hashes,
)
# unpack the archive to the build dir location. even when only downloading
# archives, they have to be unpacked to parse dependencies
unpack_file(file.path, location, file.content_type)
return file
def _download_http_url(
link, # type: Link
@ -415,14 +415,29 @@ class RequirementPreparer(object):
else:
logger.info('Collecting %s', req.req or req)
download_dir = self.download_dir
if link.is_wheel and self.wheel_download_dir:
# when doing 'pip wheel` we download wheels to a
# dedicated dir.
download_dir = self.wheel_download_dir
if link.is_wheel:
if download_dir:
# When downloading, we only unpack wheels to get
# metadata.
autodelete_unpacked = True
else:
# When installing a wheel, we use the unpacked
# wheel.
autodelete_unpacked = False
else:
# We always delete unpacked sdists after pip runs.
autodelete_unpacked = True
with indent_log():
# @@ if filesystem packages are not marked
# editable in a req, a non deterministic error
# occurs when the script attempts to unpack the
# build directory
# Since source_dir is only set for editable requirements.
assert req.source_dir is None
req.ensure_has_source_dir(self.build_dir)
req.ensure_has_source_dir(self.build_dir, autodelete_unpacked)
# If a checkout exists, it's unwise to keep going. version
# inconsistencies are logged later, but do not fail the
# installation.
@ -470,14 +485,8 @@ class RequirementPreparer(object):
# showing the user what the hash should be.
hashes = MissingHashes()
download_dir = self.download_dir
if link.is_wheel and self.wheel_download_dir:
# when doing 'pip wheel` we download wheels to a
# dedicated dir.
download_dir = self.wheel_download_dir
try:
local_path = unpack_url(
local_file = unpack_url(
link, req.source_dir, self.downloader, download_dir,
hashes=hashes,
)
@ -494,23 +503,8 @@ class RequirementPreparer(object):
# For use in later processing, preserve the file path on the
# requirement.
if local_path:
req.local_file_path = local_path
if link.is_wheel:
if download_dir:
# When downloading, we only unpack wheels to get
# metadata.
autodelete_unpacked = True
else:
# When installing a wheel, we use the unpacked
# wheel.
autodelete_unpacked = False
else:
# We always delete unpacked sdists after pip runs.
autodelete_unpacked = True
if autodelete_unpacked:
write_delete_marker_file(req.source_dir)
if local_file:
req.local_file_path = local_file.path
abstract_dist = _get_prepared_distribution(
req, self.req_tracker, self.finder, self.build_isolation,
@ -519,10 +513,10 @@ class RequirementPreparer(object):
if download_dir:
if link.is_existing_dir():
logger.info('Link is a directory, ignoring download_dir')
elif local_path and not os.path.exists(
elif local_file and not os.path.exists(
os.path.join(download_dir, link.filename)
):
_copy_file(local_path, download_dir, link)
_copy_file(local_file.path, download_dir, link)
if self._download_should_save:
# Make a .zip of the source_dir we already created.
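
The net effect in this file: get_file_url() and get_http_url() return a small File record (path plus content type), unpack_url() performs the unpacking and returns Optional[File], and the delete-marker bookkeeping disappears because build locations are now managed TempDirectory instances. A hedged caller-side sketch under those assumptions; fetch_and_unpack is a hypothetical wrapper, and link, location and downloader stand for the objects RequirementPreparer already holds:

from pip._internal.operations.prepare import unpack_url


def fetch_and_unpack(link, location, downloader):
    """Hypothetical wrapper; returns the local archive path, if any."""
    file = unpack_url(link, location, downloader)
    if file is None:
        # VCS links and local source directories are handled in place
        return None
    # unpack_url() now returns a File record instead of a bare path string
    return file.path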


@ -238,7 +238,9 @@ def install_req_from_editable(
constraint=constraint,
use_pep517=use_pep517,
isolated=isolated,
options=options if options else {},
install_options=options.get("install_options", []) if options else [],
global_options=options.get("global_options", []) if options else [],
hash_options=options.get("hashes", {}) if options else {},
wheel_cache=wheel_cache,
extras=parts.extras,
)
@ -400,7 +402,9 @@ def install_req_from_line(
return InstallRequirement(
parts.requirement, comes_from, link=parts.link, markers=parts.markers,
use_pep517=use_pep517, isolated=isolated,
options=options if options else {},
install_options=options.get("install_options", []) if options else [],
global_options=options.get("global_options", []) if options else [],
hash_options=options.get("hashes", {}) if options else {},
wheel_cache=wheel_cache,
constraint=constraint,
extras=parts.extras,
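
The requirements-file layer still passes one options dict, and the constructors now split it into the new per-kind keyword arguments. A small runnable sketch; the project name and option values are illustrative only:

from pip._internal.req.constructors import install_req_from_line

req = install_req_from_line(
    "example-pkg==1.0",
    options={
        "install_options": ["--home=/tmp/example"],
        "global_options": ["--no-user-cfg"],
        "hashes": {},
    },
)
assert req.install_options == ["--home=/tmp/example"]
assert req.global_options == ["--no-user-cfg"]
assert req.hash_options == {}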


@ -34,10 +34,6 @@ from pip._internal.req.req_uninstall import UninstallPathSet
from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.hashes import Hashes
from pip._internal.utils.logging import indent_log
from pip._internal.utils.marker_files import (
PIP_DELETE_MARKER_FILENAME,
has_delete_marker_file,
)
from pip._internal.utils.misc import (
ask_path_exists,
backup_dir,
@ -47,7 +43,6 @@ from pip._internal.utils.misc import (
get_installed_version,
hide_url,
redact_auth_from_url,
rmtree,
)
from pip._internal.utils.packaging import get_metadata
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
@ -113,7 +108,9 @@ class InstallRequirement(object):
markers=None, # type: Optional[Marker]
use_pep517=None, # type: Optional[bool]
isolated=False, # type: bool
options=None, # type: Optional[Dict[str, Any]]
install_options=None, # type: Optional[List[str]]
global_options=None, # type: Optional[List[str]]
hash_options=None, # type: Optional[Dict[str, List[str]]]
wheel_cache=None, # type: Optional[WheelCache]
constraint=False, # type: bool
extras=() # type: Iterable[str]
@ -161,7 +158,10 @@ class InstallRequirement(object):
self._temp_build_dir = None # type: Optional[TempDirectory]
# Set to True after successful installation
self.install_succeeded = None # type: Optional[bool]
self.options = options if options else {}
# Supplied options
self.install_options = install_options if install_options else []
self.global_options = global_options if global_options else []
self.hash_options = hash_options if hash_options else {}
# Set to True after successful preparation of this requirement
self.prepared = False
self.is_direct = False
@ -309,7 +309,7 @@ class InstallRequirement(object):
URL do not.
"""
return bool(self.options.get('hashes', {}))
return bool(self.hash_options)
def hashes(self, trust_internet=True):
# type: (bool) -> Hashes
@ -327,7 +327,7 @@ class InstallRequirement(object):
downloaded from the internet, as by populate_link()
"""
good_hashes = self.options.get('hashes', {}).copy()
good_hashes = self.hash_options.copy()
link = self.link if trust_internet else self.original_link
if link and link.hash:
good_hashes.setdefault(link.hash_name, []).append(link.hash)
@ -349,8 +349,8 @@ class InstallRequirement(object):
s += '->' + comes_from
return s
def ensure_build_location(self, build_dir):
# type: (str) -> str
def ensure_build_location(self, build_dir, autodelete):
# type: (str, bool) -> str
assert build_dir is not None
if self._temp_build_dir is not None:
assert self._temp_build_dir.path
@ -373,7 +373,16 @@ class InstallRequirement(object):
if not os.path.exists(build_dir):
logger.debug('Creating directory %s', build_dir)
os.makedirs(build_dir)
return os.path.join(build_dir, name)
actual_build_dir = os.path.join(build_dir, name)
# `None` indicates that we respect the globally-configured deletion
# settings, which is what we actually want when auto-deleting.
delete_arg = None if autodelete else False
return TempDirectory(
path=actual_build_dir,
delete=delete_arg,
kind=tempdir_kinds.REQ_BUILD,
globally_managed=True,
).path
def _set_requirement(self):
# type: () -> None
@ -413,16 +422,6 @@ class InstallRequirement(object):
)
self.req = Requirement(metadata_name)
def remove_temporary_source(self):
# type: () -> None
"""Remove the source files from this requirement, if they are marked
for deletion"""
if self.source_dir and has_delete_marker_file(self.source_dir):
logger.debug('Removing source in %s', self.source_dir)
rmtree(self.source_dir)
self.source_dir = None
self._temp_build_dir = None
def check_if_exists(self, use_user_site):
# type: (bool) -> None
"""Find an installed distribution that satisfies or conflicts
@ -600,8 +599,8 @@ class InstallRequirement(object):
)
# For both source distributions and editables
def ensure_has_source_dir(self, parent_dir):
# type: (str) -> None
def ensure_has_source_dir(self, parent_dir, autodelete=False):
# type: (str, bool) -> None
"""Ensure that a source_dir is set.
This will create a temporary build dir if the name of the requirement
@ -612,7 +611,9 @@ class InstallRequirement(object):
:return: self.source_dir
"""
if self.source_dir is None:
self.source_dir = self.ensure_build_location(parent_dir)
self.source_dir = self.ensure_build_location(
parent_dir, autodelete
)
# For editable installations
def update_editable(self, obtain=True):
@ -756,8 +757,6 @@ class InstallRequirement(object):
zipdir.external_attr = 0x1ED << 16 # 0o755
zip_output.writestr(zipdir, '')
for filename in filenames:
if filename == PIP_DELETE_MARKER_FILENAME:
continue
file_arcname = self._get_archive_name(
filename, parentdir=dirpath, rootdir=dir,
)
@ -824,10 +823,8 @@ class InstallRequirement(object):
# Options specified in requirements file override those
# specified on the command line, since the last option given
# to setup.py is the one that is used.
global_options = list(global_options) + \
self.options.get('global_options', [])
install_options = list(install_options) + \
self.options.get('install_options', [])
global_options = list(global_options) + self.global_options
install_options = list(install_options) + self.install_options
try:
success = install_legacy(
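
In summary for this file: InstallRequirement no longer carries a catch-all options dict; install, global and hash options become separate constructor arguments and attributes, and the pip-delete-marker logic is replaced by a managed build TempDirectory. A small runnable sketch of the new constructor arguments (package name and values are illustrative only):

from pip._vendor.packaging.requirements import Requirement
from pip._internal.req.req_install import InstallRequirement

req = InstallRequirement(
    Requirement("example-pkg"),
    comes_from=None,
    install_options=["--prefix=/tmp/example"],
    global_options=[],
    hash_options={"sha256": ["0" * 64]},  # placeholder digest
)
assert req.has_hash_options
assert req.hash_options["sha256"] == ["0" * 64]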


@ -11,7 +11,6 @@ from pip._vendor.packaging.utils import canonicalize_name
from pip._internal import pep425tags
from pip._internal.exceptions import InstallationError
from pip._internal.models.wheel import Wheel
from pip._internal.utils.logging import indent_log
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
@ -34,7 +33,6 @@ class RequirementSet(object):
self.unnamed_requirements = [] # type: List[InstallRequirement]
self.successfully_downloaded = [] # type: List[InstallRequirement]
self.reqs_to_cleanup = [] # type: List[InstallRequirement]
def __str__(self):
# type: () -> str
@ -162,7 +160,6 @@ class RequirementSet(object):
)
)
if does_not_satisfy_constraint:
self.reqs_to_cleanup.append(install_req)
raise InstallationError(
"Could not satisfy constraints for '{}': "
"installation from path or url cannot be "
@ -200,10 +197,7 @@ class RequirementSet(object):
raise KeyError("No project with the name %r" % name)
def cleanup_files(self):
# type: () -> None
"""Clean up files, remove builds."""
logger.debug('Cleaning up...')
with indent_log():
for req in self.reqs_to_cleanup:
req.remove_temporary_source()
@property
def all_requirements(self):
# type: () -> List[InstallRequirement]
return self.unnamed_requirements + list(self.requirements.values())


@ -1,25 +0,0 @@
import os.path
DELETE_MARKER_MESSAGE = '''\
This file is placed here by pip to indicate the source was put
here by pip.
Once this package is successfully installed this source code will be
deleted (unless you remove this file).
'''
PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt'
def has_delete_marker_file(directory):
# type: (str) -> bool
return os.path.exists(os.path.join(directory, PIP_DELETE_MARKER_FILENAME))
def write_delete_marker_file(directory):
# type: (str) -> None
"""
Write the pip delete marker file into this directory.
"""
filepath = os.path.join(directory, PIP_DELETE_MARKER_FILENAME)
with open(filepath, 'w') as marker_fp:
marker_fp.write(DELETE_MARKER_MESSAGE)


@ -25,6 +25,7 @@ logger = logging.getLogger(__name__)
# globally-managed.
tempdir_kinds = enum(
BUILD_ENV="build-env",
EPHEM_WHEEL_CACHE="ephem-wheel-cache",
REQ_BUILD="req-build",
)
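
For reference, tempdir_kinds is pip's small string-valued enum helper, so the new EPHEM_WHEEL_CACHE entry is the "ephem-wheel-cache" string that cache.py and KEEPABLE_TEMPDIR_TYPES (in the req_command.py hunk above) refer to. A minimal sketch, assuming the global temp-dir manager is active:

from pip._internal.utils.temp_dir import (
    TempDirectory,
    global_tempdir_manager,
    tempdir_kinds,
)

assert tempdir_kinds.EPHEM_WHEEL_CACHE == "ephem-wheel-cache"

with global_tempdir_manager():
    tmp = TempDirectory(
        kind=tempdir_kinds.EPHEM_WHEEL_CACHE,
        globally_managed=True,  # cleaned up when the manager exits
    )
    print(tmp.path)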


@ -4,7 +4,6 @@ from os.path import exists
import pytest
from pip._internal.cli.status_codes import PREVIOUS_BUILD_DIR_ERROR
from pip._internal.utils.marker_files import write_delete_marker_file
from tests.lib import need_mercurial, windows_workaround_7667
from tests.lib.local_repos import local_checkout
@ -126,7 +125,6 @@ def test_cleanup_prevented_upon_build_dir_exception(script, data):
build = script.venv_path / 'build'
build_simple = build / 'simple'
os.makedirs(build_simple)
write_delete_marker_file(build_simple)
build_simple.joinpath("setup.py").write_text("#")
result = script.pip(
'install', '-f', data.find_links, '--no-index', 'simple',


@ -6,7 +6,6 @@ from os.path import exists
import pytest
from pip._internal.cli.status_codes import ERROR, PREVIOUS_BUILD_DIR_ERROR
from pip._internal.utils.marker_files import write_delete_marker_file
from tests.lib import pyversion
@ -225,7 +224,6 @@ def test_pip_wheel_fail_cause_of_previous_build_dir(script, data):
# Given that I have a previous build dir of the `simple` package
build = script.venv_path / 'build' / 'simple'
os.makedirs(build)
write_delete_marker_file(script.venv_path / 'build' / 'simple')
build.joinpath('setup.py').write_text('#')
# When I call pip trying to install things again


@ -10,7 +10,6 @@ from pip._internal.commands.install import (
warn_deprecated_install_options,
)
from pip._internal.req.req_install import InstallRequirement
from pip._internal.req.req_set import RequirementSet
class TestDecideUserInstall:
@ -47,8 +46,7 @@ class TestDecideUserInstall:
def test_deprecation_notice_for_pip_install_options(recwarn):
install_options = ["--prefix=/hello"]
req_set = RequirementSet()
warn_deprecated_install_options(req_set, install_options)
warn_deprecated_install_options([], install_options)
assert len(recwarn) == 1
message = recwarn[0].message.args[0]
@ -57,21 +55,21 @@ def test_deprecation_notice_for_pip_install_options(recwarn):
def test_deprecation_notice_for_requirement_options(recwarn):
install_options = []
req_set = RequirementSet()
bad_named_req_options = {"install_options": ["--home=/wow"]}
bad_named_req_options = ["--home=/wow"]
bad_named_req = InstallRequirement(
Requirement("hello"), "requirements.txt", options=bad_named_req_options
Requirement("hello"), "requirements.txt",
install_options=bad_named_req_options
)
req_set.add_named_requirement(bad_named_req)
bad_unnamed_req_options = {"install_options": ["--install-lib=/lib"]}
bad_unnamed_req_options = ["--install-lib=/lib"]
bad_unnamed_req = InstallRequirement(
None, "requirements2.txt", options=bad_unnamed_req_options
None, "requirements2.txt", install_options=bad_unnamed_req_options
)
req_set.add_unnamed_requirement(bad_unnamed_req)
warn_deprecated_install_options(req_set, install_options)
warn_deprecated_install_options(
[bad_named_req, bad_unnamed_req], install_options
)
assert len(recwarn) == 1
message = recwarn[0].message.args[0]


@ -13,8 +13,7 @@ from pip._internal.network.session import PipSession
from pip._internal.operations.prepare import (
_copy_source_tree,
_download_http_url,
unpack_file_url,
unpack_http_url,
unpack_url,
)
from pip._internal.utils.hashes import Hashes
from pip._internal.utils.urls import path_to_url
@ -27,7 +26,7 @@ from tests.lib.path import Path
from tests.lib.requests_mocks import MockResponse
def test_unpack_http_url_with_urllib_response_without_content_type(data):
def test_unpack_url_with_urllib_response_without_content_type(data):
"""
It should download and unpack files even if no Content-Type header exists
"""
@ -46,7 +45,7 @@ def test_unpack_http_url_with_urllib_response_without_content_type(data):
link = Link(uri)
temp_dir = mkdtemp()
try:
unpack_http_url(
unpack_url(
link,
temp_dir,
downloader=downloader,
@ -172,7 +171,7 @@ def test_copy_source_tree_with_unreadable_dir_fails(clean_project, tmpdir):
assert expected_files == copied_files
class Test_unpack_file_url(object):
class Test_unpack_url(object):
def prep(self, tmpdir, data):
self.build_dir = tmpdir.joinpath('build')
@ -185,16 +184,17 @@ class Test_unpack_file_url(object):
self.dist_path2 = data.packages.joinpath(self.dist_file2)
self.dist_url = Link(path_to_url(self.dist_path))
self.dist_url2 = Link(path_to_url(self.dist_path2))
self.no_downloader = Mock(side_effect=AssertionError)
def test_unpack_file_url_no_download(self, tmpdir, data):
def test_unpack_url_no_download(self, tmpdir, data):
self.prep(tmpdir, data)
unpack_file_url(self.dist_url, self.build_dir)
unpack_url(self.dist_url, self.build_dir, self.no_downloader)
assert os.path.isdir(os.path.join(self.build_dir, 'simple'))
assert not os.path.isfile(
os.path.join(self.download_dir, self.dist_file))
def test_unpack_file_url_bad_hash(self, tmpdir, data,
monkeypatch):
def test_unpack_url_bad_hash(self, tmpdir, data,
monkeypatch):
"""
Test when the file url hash fragment is wrong
"""
@ -202,16 +202,18 @@ class Test_unpack_file_url(object):
url = '{}#md5=bogus'.format(self.dist_url.url)
dist_url = Link(url)
with pytest.raises(HashMismatch):
unpack_file_url(dist_url,
self.build_dir,
hashes=Hashes({'md5': ['bogus']}))
unpack_url(dist_url,
self.build_dir,
downloader=self.no_downloader,
hashes=Hashes({'md5': ['bogus']}))
def test_unpack_file_url_thats_a_dir(self, tmpdir, data):
def test_unpack_url_thats_a_dir(self, tmpdir, data):
self.prep(tmpdir, data)
dist_path = data.packages.joinpath("FSPkg")
dist_url = Link(path_to_url(dist_path))
unpack_file_url(dist_url, self.build_dir,
download_dir=self.download_dir)
unpack_url(dist_url, self.build_dir,
downloader=self.no_downloader,
download_dir=self.download_dir)
assert os.path.isdir(os.path.join(self.build_dir, 'fspkg'))
@ -219,7 +221,7 @@ class Test_unpack_file_url(object):
'.nox',
'.tox'
])
def test_unpack_file_url_excludes_expected_dirs(tmpdir, exclude_dir):
def test_unpack_url_excludes_expected_dirs(tmpdir, exclude_dir):
src_dir = tmpdir / 'src'
dst_dir = tmpdir / 'dst'
src_included_file = src_dir.joinpath('file.txt')
@ -239,9 +241,10 @@ def test_unpack_file_url_excludes_expected_dirs(tmpdir, exclude_dir):
dst_included_dir = dst_dir.joinpath('subdir', exclude_dir)
src_link = Link(path_to_url(src_dir))
unpack_file_url(
unpack_url(
src_link,
dst_dir,
Mock(side_effect=AssertionError),
download_dir=None
)
assert not os.path.isdir(dst_excluded_dir)


@ -112,7 +112,8 @@ class TestRequirementSet(object):
r"pip can't proceed with [\s\S]*%s[\s\S]*%s" %
(req, build_dir.replace('\\', '\\\\')),
resolver.resolve,
reqset,
reqset.all_requirements,
True,
)
# TODO: Update test when Python 2.7 is dropped.
@ -129,7 +130,7 @@ class TestRequirementSet(object):
reqset.add_requirement(req)
finder = make_test_finder(find_links=[data.find_links])
with self._basic_resolver(finder) as resolver:
resolver.resolve(reqset)
reqset = resolver.resolve(reqset.all_requirements, True)
# This is hacky but does test both case in py2 and py3
if sys.version_info[:2] == (2, 7):
assert reqset.has_requirement('simple')
@ -155,21 +156,21 @@ class TestRequirementSet(object):
r' simple==1.0 --hash=sha256:393043e672415891885c9a2a0929b1'
r'af95fb866d6ca016b42d2e6ce53619b653$',
resolver.resolve,
reqset
reqset.all_requirements,
True,
)
def test_missing_hash_with_require_hashes_in_reqs_file(self, data, tmpdir):
"""--require-hashes in a requirements file should make its way to the
RequirementSet.
"""
req_set = RequirementSet()
finder = make_test_finder(find_links=[data.find_links])
session = finder._link_collector.session
command = create_command('install')
with requirements_file('--require-hashes', tmpdir) as reqs_file:
options, args = command.parse_args(['-r', reqs_file])
command.populate_requirement_set(
req_set, args, options, finder, session, wheel_cache=None,
command.get_requirements(
args, options, finder, session, wheel_cache=None,
)
assert options.require_hashes
@ -209,7 +210,8 @@ class TestRequirementSet(object):
r" file://.*{sep}data{sep}packages{sep}FSPkg "
r"\(from -r file \(line 2\)\)".format(sep=sep),
resolver.resolve,
reqset,
reqset.all_requirements,
True,
)
def test_unpinned_hash_checking(self, data):
@ -237,7 +239,8 @@ class TestRequirementSet(object):
r' simple .* \(from -r file \(line 1\)\)\n'
r' simple2>1.0 .* \(from -r file \(line 2\)\)',
resolver.resolve,
reqset,
reqset.all_requirements,
True,
)
def test_hash_mismatch(self, data):
@ -258,7 +261,8 @@ class TestRequirementSet(object):
r' Got 393043e672415891885c9a2a0929b1af95fb'
r'866d6ca016b42d2e6ce53619b653$',
resolver.resolve,
reqset,
reqset.all_requirements,
True,
)
def test_unhashed_deps_on_require_hashes(self, data):
@ -280,7 +284,8 @@ class TestRequirementSet(object):
r'versions pinned.*\n'
r' TopoRequires from .*$',
resolver.resolve,
reqset,
reqset.all_requirements,
True,
)
def test_hashed_deps_on_require_hashes(self):


@ -320,9 +320,8 @@ class TestProcessLine(object):
'--global-option="yo3" --global-option "yo4"'
filename = 'filename'
req = line_processor(line, filename, 1)[0]
assert req.options == {
'global_options': ['yo3', 'yo4'],
'install_options': ['yo1', 'yo2']}
assert req.global_options == ['yo3', 'yo4']
assert req.install_options == ['yo1', 'yo2']
def test_hash_options(self, line_processor):
"""Test the --hash option: mostly its value storage.
@ -338,13 +337,13 @@ class TestProcessLine(object):
'e5a6c65260e9cb8a7')
filename = 'filename'
req = line_processor(line, filename, 1)[0]
assert req.options == {'hashes': {
assert req.hash_options == {
'sha256': ['2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e730433'
'62938b9824',
'486ea46224d1bb4fb680f34f7c9ad96a8f24ec88be73ea8e5a6c65'
'260e9cb8a7'],
'sha384': ['59e1748777448c69de6b800d7a33bbfb9ff1b463e44354c3553bcd'
'b9c666fa90125a3c79f90397bdf5f6a13de828684f']}}
'b9c666fa90125a3c79f90397bdf5f6a13de828684f']}
def test_set_isolated(self, line_processor, options):
line = 'SomeProject'


@ -20,7 +20,9 @@ class TestInstallRequirementBuildDirectory(object):
# Make sure we're handling it correctly with real path.
requirement = InstallRequirement(None, None)
tmp_dir = tempfile.mkdtemp('-build', 'pip-')
tmp_build_dir = requirement.ensure_build_location(tmp_dir)
tmp_build_dir = requirement.ensure_build_location(
tmp_dir, autodelete=False
)
assert (
os.path.dirname(tmp_build_dir) ==
os.path.realpath(os.path.dirname(tmp_dir))