From 01fa93f715c6c329a7ddf76646e19af82bf94a55 Mon Sep 17 00:00:00 2001 From: Chris Hunt Date: Mon, 2 Sep 2019 18:37:18 -0400 Subject: [PATCH 1/3] Add contextlib2 for ExitStack in Python 2. --- src/pip/_vendor/__init__.py | 1 + src/pip/_vendor/contextlib2.LICENSE | 120 ++++++++ src/pip/_vendor/contextlib2.py | 436 ++++++++++++++++++++++++++++ src/pip/_vendor/vendor.txt | 1 + 4 files changed, 558 insertions(+) create mode 100644 src/pip/_vendor/contextlib2.LICENSE create mode 100644 src/pip/_vendor/contextlib2.py diff --git a/src/pip/_vendor/__init__.py b/src/pip/_vendor/__init__.py index c1d9508dd..2f8245561 100644 --- a/src/pip/_vendor/__init__.py +++ b/src/pip/_vendor/__init__.py @@ -60,6 +60,7 @@ if DEBUNDLED: # Actually alias all of our vendored dependencies. vendored("cachecontrol") vendored("colorama") + vendored("contextlib2") vendored("distlib") vendored("distro") vendored("html5lib") diff --git a/src/pip/_vendor/contextlib2.LICENSE b/src/pip/_vendor/contextlib2.LICENSE new file mode 100644 index 000000000..c12b8ab2d --- /dev/null +++ b/src/pip/_vendor/contextlib2.LICENSE @@ -0,0 +1,120 @@ +A. HISTORY OF THE SOFTWARE +========================== + +contextlib2 is a derivative of the contextlib module distributed by the PSF +as part of the Python standard library. According, it is itself redistributed +under the PSF license (reproduced in full below). As the contextlib module +was added only in Python 2.5, the licenses for earlier Python versions are +not applicable and have not been included. + +Python was created in the early 1990s by Guido van Rossum at Stichting +Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands +as a successor of a language called ABC. Guido remains Python's +principal author, although it includes many contributions from others. + +In 1995, Guido continued his work on Python at the Corporation for +National Research Initiatives (CNRI, see http://www.cnri.reston.va.us) +in Reston, Virginia where he released several versions of the +software. + +In May 2000, Guido and the Python core development team moved to +BeOpen.com to form the BeOpen PythonLabs team. In October of the same +year, the PythonLabs team moved to Digital Creations (now Zope +Corporation, see http://www.zope.com). In 2001, the Python Software +Foundation (PSF, see http://www.python.org/psf/) was formed, a +non-profit organization created specifically to own Python-related +Intellectual Property. Zope Corporation is a sponsoring member of +the PSF. + +All Python releases are Open Source (see http://www.opensource.org for +the Open Source Definition). Historically, most, but not all, Python +releases have also been GPL-compatible; the table below summarizes +the various releases that included the contextlib module. + + Release Derived Year Owner GPL- + from compatible? (1) + + 2.5 2.4 2006 PSF yes + 2.5.1 2.5 2007 PSF yes + 2.5.2 2.5.1 2008 PSF yes + 2.5.3 2.5.2 2008 PSF yes + 2.6 2.5 2008 PSF yes + 2.6.1 2.6 2008 PSF yes + 2.6.2 2.6.1 2009 PSF yes + 2.6.3 2.6.2 2009 PSF yes + 2.6.4 2.6.3 2009 PSF yes + 2.6.5 2.6.4 2010 PSF yes + 3.0 2.6 2008 PSF yes + 3.0.1 3.0 2009 PSF yes + 3.1 3.0.1 2009 PSF yes + 3.1.1 3.1 2009 PSF yes + 3.1.2 3.1.1 2010 PSF yes + 3.1.3 3.1.2 2010 PSF yes + 3.1.4 3.1.3 2011 PSF yes + 3.2 3.1 2011 PSF yes + 3.2.1 3.2 2011 PSF yes + 3.2.2 3.2.1 2011 PSF yes + 3.3 3.2 2012 PSF yes + +Footnotes: + +(1) GPL-compatible doesn't mean that we're distributing Python under + the GPL. 
All Python licenses, unlike the GPL, let you distribute + a modified version without making your changes open source. The + GPL-compatible licenses make it possible to combine Python with + other software that is released under the GPL; the others don't. + +Thanks to the many outside volunteers who have worked under Guido's +direction to make these releases possible. + + +B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON +=============================================================== + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011 Python Software Foundation; All Rights Reserved" are retained in Python +alone or in any derivative version prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. 
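For context, a minimal sketch of what this vendoring provides on Python 2: ExitStack lets a dynamic number of context managers be entered and reliably unwound in LIFO order. The import path assumes the vendored copy added below; read_files is a hypothetical example, not pip code.

    from pip._vendor.contextlib2 import ExitStack

    def read_files(filenames):
        # Enter one context manager per file; every file opened here is
        # closed when the with-block exits, even if a later open() or
        # read() raises.
        with ExitStack() as stack:
            files = [stack.enter_context(open(name)) for name in filenames]
            return [f.read() for f in files]
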
diff --git a/src/pip/_vendor/contextlib2.py b/src/pip/_vendor/contextlib2.py new file mode 100644 index 000000000..f08df14ce --- /dev/null +++ b/src/pip/_vendor/contextlib2.py @@ -0,0 +1,436 @@ +"""contextlib2 - backports and enhancements to the contextlib module""" + +import sys +import warnings +from collections import deque +from functools import wraps + +__all__ = ["contextmanager", "closing", "ContextDecorator", "ExitStack", + "redirect_stdout", "redirect_stderr", "suppress"] + +# Backwards compatibility +__all__ += ["ContextStack"] + +class ContextDecorator(object): + "A base class or mixin that enables context managers to work as decorators." + + def refresh_cm(self): + """Returns the context manager used to actually wrap the call to the + decorated function. + + The default implementation just returns *self*. + + Overriding this method allows otherwise one-shot context managers + like _GeneratorContextManager to support use as decorators via + implicit recreation. + + DEPRECATED: refresh_cm was never added to the standard library's + ContextDecorator API + """ + warnings.warn("refresh_cm was never added to the standard library", + DeprecationWarning) + return self._recreate_cm() + + def _recreate_cm(self): + """Return a recreated instance of self. + + Allows an otherwise one-shot context manager like + _GeneratorContextManager to support use as + a decorator via implicit recreation. + + This is a private interface just for _GeneratorContextManager. + See issue #11647 for details. + """ + return self + + def __call__(self, func): + @wraps(func) + def inner(*args, **kwds): + with self._recreate_cm(): + return func(*args, **kwds) + return inner + + +class _GeneratorContextManager(ContextDecorator): + """Helper for @contextmanager decorator.""" + + def __init__(self, func, args, kwds): + self.gen = func(*args, **kwds) + self.func, self.args, self.kwds = func, args, kwds + # Issue 19330: ensure context manager instances have good docstrings + doc = getattr(func, "__doc__", None) + if doc is None: + doc = type(self).__doc__ + self.__doc__ = doc + # Unfortunately, this still doesn't provide good help output when + # inspecting the created context manager instances, since pydoc + # currently bypasses the instance docstring and shows the docstring + # for the class instead. + # See http://bugs.python.org/issue19404 for more details. + + def _recreate_cm(self): + # _GCM instances are one-shot context managers, so the + # CM must be recreated each time a decorated function is + # called + return self.__class__(self.func, self.args, self.kwds) + + def __enter__(self): + try: + return next(self.gen) + except StopIteration: + raise RuntimeError("generator didn't yield") + + def __exit__(self, type, value, traceback): + if type is None: + try: + next(self.gen) + except StopIteration: + return + else: + raise RuntimeError("generator didn't stop") + else: + if value is None: + # Need to force instantiation so we can reliably + # tell if we get the same exception back + value = type() + try: + self.gen.throw(type, value, traceback) + raise RuntimeError("generator didn't stop after throw()") + except StopIteration as exc: + # Suppress StopIteration *unless* it's the same exception that + # was passed to throw(). This prevents a StopIteration + # raised inside the "with" statement from being suppressed. 
+ return exc is not value + except RuntimeError as exc: + # Don't re-raise the passed in exception + if exc is value: + return False + # Likewise, avoid suppressing if a StopIteration exception + # was passed to throw() and later wrapped into a RuntimeError + # (see PEP 479). + if _HAVE_EXCEPTION_CHAINING and exc.__cause__ is value: + return False + raise + except: + # only re-raise if it's *not* the exception that was + # passed to throw(), because __exit__() must not raise + # an exception unless __exit__() itself failed. But throw() + # has to raise the exception to signal propagation, so this + # fixes the impedance mismatch between the throw() protocol + # and the __exit__() protocol. + # + if sys.exc_info()[1] is not value: + raise + + +def contextmanager(func): + """@contextmanager decorator. + + Typical usage: + + @contextmanager + def some_generator(): + + try: + yield + finally: + + + This makes this: + + with some_generator() as : + + + equivalent to this: + + + try: + = + + finally: + + + """ + @wraps(func) + def helper(*args, **kwds): + return _GeneratorContextManager(func, args, kwds) + return helper + + +class closing(object): + """Context to automatically close something at the end of a block. + + Code like this: + + with closing(.open()) as f: + + + is equivalent to this: + + f = .open() + try: + + finally: + f.close() + + """ + def __init__(self, thing): + self.thing = thing + def __enter__(self): + return self.thing + def __exit__(self, *exc_info): + self.thing.close() + + +class _RedirectStream(object): + + _stream = None + + def __init__(self, new_target): + self._new_target = new_target + # We use a list of old targets to make this CM re-entrant + self._old_targets = [] + + def __enter__(self): + self._old_targets.append(getattr(sys, self._stream)) + setattr(sys, self._stream, self._new_target) + return self._new_target + + def __exit__(self, exctype, excinst, exctb): + setattr(sys, self._stream, self._old_targets.pop()) + + +class redirect_stdout(_RedirectStream): + """Context manager for temporarily redirecting stdout to another file. + + # How to send help() to stderr + with redirect_stdout(sys.stderr): + help(dir) + + # How to write help() to a file + with open('help.txt', 'w') as f: + with redirect_stdout(f): + help(pow) + """ + + _stream = "stdout" + + +class redirect_stderr(_RedirectStream): + """Context manager for temporarily redirecting stderr to another file.""" + + _stream = "stderr" + + +class suppress(object): + """Context manager to suppress specified exceptions + + After the exception is suppressed, execution proceeds with the next + statement following the with statement. + + with suppress(FileNotFoundError): + os.remove(somefile) + # Execution still resumes here if the file was already removed + """ + + def __init__(self, *exceptions): + self._exceptions = exceptions + + def __enter__(self): + pass + + def __exit__(self, exctype, excinst, exctb): + # Unlike isinstance and issubclass, CPython exception handling + # currently only looks at the concrete type hierarchy (ignoring + # the instance and subclass checking hooks). While Guido considers + # that a bug rather than a feature, it's a fairly hard one to fix + # due to various internal implementation details. suppress provides + # the simpler issubclass based semantics, rather than trying to + # exactly reproduce the limitations of the CPython interpreter. 
+ # + # See http://bugs.python.org/issue12029 for more details + return exctype is not None and issubclass(exctype, self._exceptions) + + +# Context manipulation is Python 3 only +_HAVE_EXCEPTION_CHAINING = sys.version_info[0] >= 3 +if _HAVE_EXCEPTION_CHAINING: + def _make_context_fixer(frame_exc): + def _fix_exception_context(new_exc, old_exc): + # Context may not be correct, so find the end of the chain + while 1: + exc_context = new_exc.__context__ + if exc_context is old_exc: + # Context is already set correctly (see issue 20317) + return + if exc_context is None or exc_context is frame_exc: + break + new_exc = exc_context + # Change the end of the chain to point to the exception + # we expect it to reference + new_exc.__context__ = old_exc + return _fix_exception_context + + def _reraise_with_existing_context(exc_details): + try: + # bare "raise exc_details[1]" replaces our carefully + # set-up context + fixed_ctx = exc_details[1].__context__ + raise exc_details[1] + except BaseException: + exc_details[1].__context__ = fixed_ctx + raise +else: + # No exception context in Python 2 + def _make_context_fixer(frame_exc): + return lambda new_exc, old_exc: None + + # Use 3 argument raise in Python 2, + # but use exec to avoid SyntaxError in Python 3 + def _reraise_with_existing_context(exc_details): + exc_type, exc_value, exc_tb = exc_details + exec ("raise exc_type, exc_value, exc_tb") + +# Handle old-style classes if they exist +try: + from types import InstanceType +except ImportError: + # Python 3 doesn't have old-style classes + _get_type = type +else: + # Need to handle old-style context managers on Python 2 + def _get_type(obj): + obj_type = type(obj) + if obj_type is InstanceType: + return obj.__class__ # Old-style class + return obj_type # New-style class + +# Inspired by discussions on http://bugs.python.org/issue13585 +class ExitStack(object): + """Context manager for dynamic management of a stack of exit callbacks + + For example: + + with ExitStack() as stack: + files = [stack.enter_context(open(fname)) for fname in filenames] + # All opened files will automatically be closed at the end of + # the with statement, even if attempts to open files later + # in the list raise an exception + + """ + def __init__(self): + self._exit_callbacks = deque() + + def pop_all(self): + """Preserve the context stack by transferring it to a new instance""" + new_stack = type(self)() + new_stack._exit_callbacks = self._exit_callbacks + self._exit_callbacks = deque() + return new_stack + + def _push_cm_exit(self, cm, cm_exit): + """Helper to correctly register callbacks to __exit__ methods""" + def _exit_wrapper(*exc_details): + return cm_exit(cm, *exc_details) + _exit_wrapper.__self__ = cm + self.push(_exit_wrapper) + + def push(self, exit): + """Registers a callback with the standard __exit__ method signature + + Can suppress exceptions the same way __exit__ methods can. + + Also accepts any object with an __exit__ method (registering a call + to the method instead of the object itself) + """ + # We use an unbound method rather than a bound method to follow + # the standard lookup behaviour for special methods + _cb_type = _get_type(exit) + try: + exit_method = _cb_type.__exit__ + except AttributeError: + # Not a context manager, so assume its a callable + self._exit_callbacks.append(exit) + else: + self._push_cm_exit(exit, exit_method) + return exit # Allow use as a decorator + + def callback(self, callback, *args, **kwds): + """Registers an arbitrary callback and arguments. 
+ + Cannot suppress exceptions. + """ + def _exit_wrapper(exc_type, exc, tb): + callback(*args, **kwds) + # We changed the signature, so using @wraps is not appropriate, but + # setting __wrapped__ may still help with introspection + _exit_wrapper.__wrapped__ = callback + self.push(_exit_wrapper) + return callback # Allow use as a decorator + + def enter_context(self, cm): + """Enters the supplied context manager + + If successful, also pushes its __exit__ method as a callback and + returns the result of the __enter__ method. + """ + # We look up the special methods on the type to match the with statement + _cm_type = _get_type(cm) + _exit = _cm_type.__exit__ + result = _cm_type.__enter__(cm) + self._push_cm_exit(cm, _exit) + return result + + def close(self): + """Immediately unwind the context stack""" + self.__exit__(None, None, None) + + def __enter__(self): + return self + + def __exit__(self, *exc_details): + received_exc = exc_details[0] is not None + + # We manipulate the exception state so it behaves as though + # we were actually nesting multiple with statements + frame_exc = sys.exc_info()[1] + _fix_exception_context = _make_context_fixer(frame_exc) + + # Callbacks are invoked in LIFO order to match the behaviour of + # nested context managers + suppressed_exc = False + pending_raise = False + while self._exit_callbacks: + cb = self._exit_callbacks.pop() + try: + if cb(*exc_details): + suppressed_exc = True + pending_raise = False + exc_details = (None, None, None) + except: + new_exc_details = sys.exc_info() + # simulate the stack of exceptions by setting the context + _fix_exception_context(new_exc_details[1], exc_details[1]) + pending_raise = True + exc_details = new_exc_details + if pending_raise: + _reraise_with_existing_context(exc_details) + return received_exc and suppressed_exc + +# Preserve backwards compatibility +class ContextStack(ExitStack): + """Backwards compatibility alias for ExitStack""" + + def __init__(self): + warnings.warn("ContextStack has been renamed to ExitStack", + DeprecationWarning) + super(ContextStack, self).__init__() + + def register_exit(self, callback): + return self.push(callback) + + def register(self, callback, *args, **kwds): + return self.callback(callback, *args, **kwds) + + def preserve(self): + return self.pop_all() diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index dbf7d664a..84ff34ea1 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -1,6 +1,7 @@ appdirs==1.4.3 CacheControl==0.12.5 colorama==0.4.1 +contextlib2==0.5.5 distlib==0.2.9.post0 distro==1.4.0 html5lib==1.0.1 From 45fe22b3ec083e2f708b80dade34528f0ea443c6 Mon Sep 17 00:00:00 2001 From: Chris Hunt Date: Mon, 2 Sep 2019 18:45:36 -0400 Subject: [PATCH 2/3] Manage overall application context in Command. 
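A rough sketch of the pattern this commit introduces, using the CommandContextMixIn defined in the new command_context.py below. DemoCommand and temp_resource are hypothetical stand-ins for illustration; only main_context() and enter_context() come from the patch.

    from contextlib import contextmanager

    from pip._internal.cli.command_context import CommandContextMixIn

    @contextmanager
    def temp_resource(name):
        print("enter %s" % name)
        try:
            yield name
        finally:
            # Runs when main_context() unwinds, after _main() has returned.
            print("exit %s" % name)

    class DemoCommand(CommandContextMixIn):
        def main(self):
            # Mirrors Command.main(): everything registered during _main()
            # is released when this with-block exits.
            with self.main_context():
                return self._main()

        def _main(self):
            res = self.enter_context(temp_resource("session"))
            return "used %s" % res

    print(DemoCommand().main())  # enter session / exit session / used session
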
--- src/pip/_internal/cli/base_command.py | 15 ++++++++++---- src/pip/_internal/cli/command_context.py | 26 ++++++++++++++++++++++++ 2 files changed, 37 insertions(+), 4 deletions(-) create mode 100644 src/pip/_internal/cli/command_context.py diff --git a/src/pip/_internal/cli/base_command.py b/src/pip/_internal/cli/base_command.py index 694527c1d..dad08c24e 100644 --- a/src/pip/_internal/cli/base_command.py +++ b/src/pip/_internal/cli/base_command.py @@ -11,6 +11,7 @@ import sys import traceback from pip._internal.cli import cmdoptions +from pip._internal.cli.command_context import CommandContextMixIn from pip._internal.cli.parser import ( ConfigOptionParser, UpdatingDefaultsHelpFormatter, @@ -44,12 +45,13 @@ __all__ = ['Command'] logger = logging.getLogger(__name__) -class Command(object): +class Command(CommandContextMixIn): usage = None # type: str ignore_require_venv = False # type: bool def __init__(self, name, summary, isolated=False): # type: (str, str, bool) -> None + super(Command, self).__init__() parser_kw = { 'usage': self.usage, 'prog': '%s %s' % (get_prog(), name), @@ -95,6 +97,14 @@ class Command(object): return self.parser.parse_args(args) def main(self, args): + # type: (List[str]) -> int + try: + with self.main_context(): + return self._main(args) + finally: + logging.shutdown() + + def _main(self, args): # type: (List[str]) -> int options, args = self.parse_args(args) @@ -180,7 +190,4 @@ class Command(object): finally: self.handle_pip_version_check(options) - # Shutdown the logging module - logging.shutdown() - return SUCCESS diff --git a/src/pip/_internal/cli/command_context.py b/src/pip/_internal/cli/command_context.py new file mode 100644 index 000000000..c55f0116e --- /dev/null +++ b/src/pip/_internal/cli/command_context.py @@ -0,0 +1,26 @@ +from contextlib import contextmanager + +from pip._vendor.contextlib2 import ExitStack + + +class CommandContextMixIn(object): + def __init__(self): + super(CommandContextMixIn, self).__init__() + self._in_main_context = False + self._main_context = ExitStack() + + @contextmanager + def main_context(self): + assert not self._in_main_context + + self._in_main_context = True + try: + with self._main_context: + yield + finally: + self._in_main_context = False + + def enter_context(self, context_provider): + assert self._in_main_context + + return self._main_context.enter_context(context_provider) From 82be4ee76a4203cab7831f0e7b0b50d2db29dee2 Mon Sep 17 00:00:00 2001 From: Chris Hunt Date: Mon, 2 Sep 2019 18:48:15 -0400 Subject: [PATCH 3/3] Use application context manager for PipSession. This removes a level of indentation from all commands without introducing any dummy functions. 
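The behaviour the commands now rely on, as a small self-contained sketch: get_default_session() lazily builds one session, registers it on the command's exit stack via enter_context(), and returns the same object on every later call, so the session outlives run() and is closed only when main_context() unwinds in Command.main(). DummySession and DemoCommand below are hypothetical stand-ins, not pip classes.

    from pip._internal.cli.command_context import CommandContextMixIn

    class DummySession(object):
        closed = False

        def __enter__(self):
            return self

        def __exit__(self, *exc):
            self.closed = True

    class DemoCommand(CommandContextMixIn):
        def __init__(self):
            super(DemoCommand, self).__init__()
            self._session = None

        def get_default_session(self):
            # Same shape as SessionCommandMixin.get_default_session() below.
            if self._session is None:
                self._session = self.enter_context(DummySession())
            return self._session

    cmd = DemoCommand()
    with cmd.main_context():
        first = cmd.get_default_session()
        second = cmd.get_default_session()
        assert first is second and not first.closed
    assert first.closed  # released when the main context unwound
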
--- src/pip/_internal/cli/req_command.py | 15 +- src/pip/_internal/commands/download.py | 107 +++++---- src/pip/_internal/commands/install.py | 307 ++++++++++++------------ src/pip/_internal/commands/search.py | 14 +- src/pip/_internal/commands/uninstall.py | 57 ++--- src/pip/_internal/commands/wheel.py | 103 ++++---- tests/functional/test_search.py | 10 +- 7 files changed, 316 insertions(+), 297 deletions(-) diff --git a/src/pip/_internal/cli/req_command.py b/src/pip/_internal/cli/req_command.py index 62ffe4b14..081ad190b 100644 --- a/src/pip/_internal/cli/req_command.py +++ b/src/pip/_internal/cli/req_command.py @@ -10,6 +10,7 @@ from functools import partial from pip._internal.cli.base_command import Command from pip._internal.cli.cmdoptions import make_search_scope +from pip._internal.cli.command_context import CommandContextMixIn from pip._internal.download import PipSession from pip._internal.exceptions import CommandError from pip._internal.index import PackageFinder @@ -36,11 +37,14 @@ if MYPY_CHECK_RUNNING: from pip._internal.utils.temp_dir import TempDirectory -class SessionCommandMixin(object): +class SessionCommandMixin(CommandContextMixIn): """ A class mixin for command classes needing _build_session(). """ + def __init__(self): + super(SessionCommandMixin, self).__init__() + self._session = None # Optional[PipSession] @classmethod def _get_index_urls(cls, options): @@ -56,6 +60,13 @@ class SessionCommandMixin(object): # Return None rather than an empty list return index_urls or None + def get_default_session(self, options): + # type: (Values) -> PipSession + """Get a default-managed session.""" + if self._session is None: + self._session = self.enter_context(self._build_session(options)) + return self._session + def _build_session(self, options, retries=None, timeout=None): # type: (Values, Optional[int], Optional[int]) -> PipSession session = PipSession( @@ -95,7 +106,7 @@ class SessionCommandMixin(object): return session -class IndexGroupCommand(SessionCommandMixin, Command): +class IndexGroupCommand(Command, SessionCommandMixin): """ Abstract base class for commands with the index_group options. diff --git a/src/pip/_internal/commands/download.py b/src/pip/_internal/commands/download.py index 1f0cd7c73..506899048 100644 --- a/src/pip/_internal/commands/download.py +++ b/src/pip/_internal/commands/download.py @@ -88,65 +88,66 @@ class DownloadCommand(RequirementCommand): ensure_dir(options.download_dir) - with self._build_session(options) as session: - target_python = make_target_python(options) - finder = self._build_package_finder( - options=options, - session=session, - target_python=target_python, + session = self.get_default_session(options) + + target_python = make_target_python(options) + finder = self._build_package_finder( + options=options, + session=session, + target_python=target_python, + ) + build_delete = (not (options.no_clean or options.build_dir)) + if options.cache_dir and not check_path_owner(options.cache_dir): + logger.warning( + "The directory '%s' or its parent directory is not owned " + "by the current user and caching wheels has been " + "disabled. check the permissions and owner of that " + "directory. 
If executing pip with sudo, you may want " + "sudo's -H flag.", + options.cache_dir, ) - build_delete = (not (options.no_clean or options.build_dir)) - if options.cache_dir and not check_path_owner(options.cache_dir): - logger.warning( - "The directory '%s' or its parent directory is not owned " - "by the current user and caching wheels has been " - "disabled. check the permissions and owner of that " - "directory. If executing pip with sudo, you may want " - "sudo's -H flag.", - options.cache_dir, - ) - options.cache_dir = None + options.cache_dir = None - with RequirementTracker() as req_tracker, TempDirectory( - options.build_dir, delete=build_delete, kind="download" - ) as directory: + with RequirementTracker() as req_tracker, TempDirectory( + options.build_dir, delete=build_delete, kind="download" + ) as directory: - requirement_set = RequirementSet( - require_hashes=options.require_hashes, - ) - self.populate_requirement_set( - requirement_set, - args, - options, - finder, - session, - None - ) + requirement_set = RequirementSet( + require_hashes=options.require_hashes, + ) + self.populate_requirement_set( + requirement_set, + args, + options, + finder, + session, + None + ) - preparer = self.make_requirement_preparer( - temp_directory=directory, - options=options, - req_tracker=req_tracker, - download_dir=options.download_dir, - ) + preparer = self.make_requirement_preparer( + temp_directory=directory, + options=options, + req_tracker=req_tracker, + download_dir=options.download_dir, + ) - resolver = self.make_resolver( - preparer=preparer, - finder=finder, - session=session, - options=options, - py_version_info=options.python_version, - ) - resolver.resolve(requirement_set) + resolver = self.make_resolver( + preparer=preparer, + finder=finder, + session=session, + options=options, + py_version_info=options.python_version, + ) + resolver.resolve(requirement_set) - downloaded = ' '.join([ - req.name for req in requirement_set.successfully_downloaded - ]) - if downloaded: - write_output('Successfully downloaded %s', downloaded) + downloaded = ' '.join([ + req.name for req in requirement_set.successfully_downloaded + ]) + if downloaded: + write_output('Successfully downloaded %s', downloaded) - # Clean up - if not options.no_clean: - requirement_set.cleanup_files() + # Clean up + if not options.no_clean: + requirement_set.cleanup_files() return requirement_set diff --git a/src/pip/_internal/commands/install.py b/src/pip/_internal/commands/install.py index 27cc6e815..a50229a9a 100644 --- a/src/pip/_internal/commands/install.py +++ b/src/pip/_internal/commands/install.py @@ -321,179 +321,180 @@ class InstallCommand(RequirementCommand): global_options = options.global_options or [] - with self._build_session(options) as session: - target_python = make_target_python(options) - finder = self._build_package_finder( - options=options, - session=session, - target_python=target_python, - ignore_requires_python=options.ignore_requires_python, + session = self.get_default_session(options) + + target_python = make_target_python(options) + finder = self._build_package_finder( + options=options, + session=session, + target_python=target_python, + ignore_requires_python=options.ignore_requires_python, + ) + build_delete = (not (options.no_clean or options.build_dir)) + wheel_cache = WheelCache(options.cache_dir, options.format_control) + + if options.cache_dir and not check_path_owner(options.cache_dir): + logger.warning( + "The directory '%s' or its parent directory is not owned " + "by the current 
user and caching wheels has been " + "disabled. check the permissions and owner of that " + "directory. If executing pip with sudo, you may want " + "sudo's -H flag.", + options.cache_dir, ) - build_delete = (not (options.no_clean or options.build_dir)) - wheel_cache = WheelCache(options.cache_dir, options.format_control) + options.cache_dir = None - if options.cache_dir and not check_path_owner(options.cache_dir): - logger.warning( - "The directory '%s' or its parent directory is not owned " - "by the current user and caching wheels has been " - "disabled. check the permissions and owner of that " - "directory. If executing pip with sudo, you may want " - "sudo's -H flag.", - options.cache_dir, - ) - options.cache_dir = None + with RequirementTracker() as req_tracker, TempDirectory( + options.build_dir, delete=build_delete, kind="install" + ) as directory: + requirement_set = RequirementSet( + require_hashes=options.require_hashes, + check_supported_wheels=not options.target_dir, + ) - with RequirementTracker() as req_tracker, TempDirectory( - options.build_dir, delete=build_delete, kind="install" - ) as directory: - requirement_set = RequirementSet( - require_hashes=options.require_hashes, - check_supported_wheels=not options.target_dir, + try: + self.populate_requirement_set( + requirement_set, args, options, finder, session, + wheel_cache ) + preparer = self.make_requirement_preparer( + temp_directory=directory, + options=options, + req_tracker=req_tracker, + ) + resolver = self.make_resolver( + preparer=preparer, + finder=finder, + session=session, + options=options, + wheel_cache=wheel_cache, + use_user_site=options.use_user_site, + ignore_installed=options.ignore_installed, + ignore_requires_python=options.ignore_requires_python, + force_reinstall=options.force_reinstall, + upgrade_strategy=upgrade_strategy, + use_pep517=options.use_pep517, + ) + resolver.resolve(requirement_set) try: - self.populate_requirement_set( - requirement_set, args, options, finder, session, - wheel_cache - ) - preparer = self.make_requirement_preparer( - temp_directory=directory, - options=options, - req_tracker=req_tracker, - ) - resolver = self.make_resolver( - preparer=preparer, - finder=finder, - session=session, - options=options, - wheel_cache=wheel_cache, - use_user_site=options.use_user_site, - ignore_installed=options.ignore_installed, - ignore_requires_python=options.ignore_requires_python, - force_reinstall=options.force_reinstall, - upgrade_strategy=upgrade_strategy, - use_pep517=options.use_pep517, - ) - resolver.resolve(requirement_set) + pip_req = requirement_set.get_requirement("pip") + except KeyError: + modifying_pip = None + else: + # If we're not replacing an already installed pip, + # we're not modifying it. + modifying_pip = pip_req.satisfied_by is None + protect_pip_from_modification_on_windows( + modifying_pip=modifying_pip + ) - try: - pip_req = requirement_set.get_requirement("pip") - except KeyError: - modifying_pip = None + check_binary_allowed = get_check_binary_allowed( + finder.format_control + ) + # Consider legacy and PEP517-using requirements separately + legacy_requirements = [] + pep517_requirements = [] + for req in requirement_set.requirements.values(): + if req.use_pep517: + pep517_requirements.append(req) else: - # If we're not replacing an already installed pip, - # we're not modifying it. 
- modifying_pip = pip_req.satisfied_by is None - protect_pip_from_modification_on_windows( - modifying_pip=modifying_pip - ) + legacy_requirements.append(req) - check_binary_allowed = get_check_binary_allowed( - finder.format_control - ) - # Consider legacy and PEP517-using requirements separately - legacy_requirements = [] - pep517_requirements = [] - for req in requirement_set.requirements.values(): - if req.use_pep517: - pep517_requirements.append(req) - else: - legacy_requirements.append(req) + wheel_builder = WheelBuilder( + preparer, wheel_cache, + build_options=[], global_options=[], + check_binary_allowed=check_binary_allowed, + ) - wheel_builder = WheelBuilder( - preparer, wheel_cache, - build_options=[], global_options=[], - check_binary_allowed=check_binary_allowed, - ) + build_failures = build_wheels( + builder=wheel_builder, + pep517_requirements=pep517_requirements, + legacy_requirements=legacy_requirements, + ) - build_failures = build_wheels( - builder=wheel_builder, - pep517_requirements=pep517_requirements, - legacy_requirements=legacy_requirements, - ) + # If we're using PEP 517, we cannot do a direct install + # so we fail here. + if build_failures: + raise InstallationError( + "Could not build wheels for {} which use" + " PEP 517 and cannot be installed directly".format( + ", ".join(r.name for r in build_failures))) - # If we're using PEP 517, we cannot do a direct install - # so we fail here. - if build_failures: - raise InstallationError( - "Could not build wheels for {} which use" - " PEP 517 and cannot be installed directly".format( - ", ".join(r.name for r in build_failures))) + to_install = resolver.get_installation_order( + requirement_set + ) - to_install = resolver.get_installation_order( - requirement_set - ) + # Consistency Checking of the package set we're installing. + should_warn_about_conflicts = ( + not options.ignore_dependencies and + options.warn_about_conflicts + ) + if should_warn_about_conflicts: + self._warn_about_conflicts(to_install) - # Consistency Checking of the package set we're installing. 
- should_warn_about_conflicts = ( - not options.ignore_dependencies and - options.warn_about_conflicts - ) - if should_warn_about_conflicts: - self._warn_about_conflicts(to_install) + # Don't warn about script install locations if + # --target has been specified + warn_script_location = options.warn_script_location + if options.target_dir: + warn_script_location = False - # Don't warn about script install locations if - # --target has been specified - warn_script_location = options.warn_script_location - if options.target_dir: - warn_script_location = False + installed = install_given_reqs( + to_install, + install_options, + global_options, + root=options.root_path, + home=target_temp_dir.path, + prefix=options.prefix_path, + pycompile=options.compile, + warn_script_location=warn_script_location, + use_user_site=options.use_user_site, + ) - installed = install_given_reqs( - to_install, - install_options, - global_options, - root=options.root_path, - home=target_temp_dir.path, - prefix=options.prefix_path, - pycompile=options.compile, - warn_script_location=warn_script_location, - use_user_site=options.use_user_site, - ) + lib_locations = get_lib_location_guesses( + user=options.use_user_site, + home=target_temp_dir.path, + root=options.root_path, + prefix=options.prefix_path, + isolated=options.isolated_mode, + ) + working_set = pkg_resources.WorkingSet(lib_locations) - lib_locations = get_lib_location_guesses( - user=options.use_user_site, - home=target_temp_dir.path, - root=options.root_path, - prefix=options.prefix_path, - isolated=options.isolated_mode, - ) - working_set = pkg_resources.WorkingSet(lib_locations) - - reqs = sorted(installed, key=operator.attrgetter('name')) - items = [] - for req in reqs: - item = req.name - try: - installed_version = get_installed_version( - req.name, working_set=working_set - ) - if installed_version: - item += '-' + installed_version - except Exception: - pass - items.append(item) - installed_desc = ' '.join(items) - if installed_desc: - write_output( - 'Successfully installed %s', installed_desc, + reqs = sorted(installed, key=operator.attrgetter('name')) + items = [] + for req in reqs: + item = req.name + try: + installed_version = get_installed_version( + req.name, working_set=working_set ) - except EnvironmentError as error: - show_traceback = (self.verbosity >= 1) - - message = create_env_error_message( - error, show_traceback, options.use_user_site, + if installed_version: + item += '-' + installed_version + except Exception: + pass + items.append(item) + installed_desc = ' '.join(items) + if installed_desc: + write_output( + 'Successfully installed %s', installed_desc, ) - logger.error(message, exc_info=show_traceback) + except EnvironmentError as error: + show_traceback = (self.verbosity >= 1) - return ERROR - except PreviousBuildDirError: - options.no_clean = True - raise - finally: - # Clean up - if not options.no_clean: - requirement_set.cleanup_files() - wheel_cache.cleanup() + message = create_env_error_message( + error, show_traceback, options.use_user_site, + ) + logger.error(message, exc_info=show_traceback) + + return ERROR + except PreviousBuildDirError: + options.no_clean = True + raise + finally: + # Clean up + if not options.no_clean: + requirement_set.cleanup_files() + wheel_cache.cleanup() if options.target_dir: self._handle_target_dir( diff --git a/src/pip/_internal/commands/search.py b/src/pip/_internal/commands/search.py index d0a36e83f..ef698d0b7 100644 --- a/src/pip/_internal/commands/search.py +++ 
b/src/pip/_internal/commands/search.py @@ -24,7 +24,7 @@ from pip._internal.utils.misc import write_output logger = logging.getLogger(__name__) -class SearchCommand(SessionCommandMixin, Command): +class SearchCommand(Command, SessionCommandMixin): """Search for PyPI packages whose name or summary contains .""" usage = """ @@ -60,11 +60,13 @@ class SearchCommand(SessionCommandMixin, Command): def search(self, query, options): index_url = options.index - with self._build_session(options) as session: - transport = PipXmlrpcTransport(index_url, session) - pypi = xmlrpc_client.ServerProxy(index_url, transport) - hits = pypi.search({'name': query, 'summary': query}, 'or') - return hits + + session = self.get_default_session(options) + + transport = PipXmlrpcTransport(index_url, session) + pypi = xmlrpc_client.ServerProxy(index_url, transport) + hits = pypi.search({'name': query, 'summary': query}, 'or') + return hits def transform_hits(hits): diff --git a/src/pip/_internal/commands/uninstall.py b/src/pip/_internal/commands/uninstall.py index 6d72400e6..0d8e4662f 100644 --- a/src/pip/_internal/commands/uninstall.py +++ b/src/pip/_internal/commands/uninstall.py @@ -10,7 +10,7 @@ from pip._internal.req.constructors import install_req_from_line from pip._internal.utils.misc import protect_pip_from_modification_on_windows -class UninstallCommand(SessionCommandMixin, Command): +class UninstallCommand(Command, SessionCommandMixin): """ Uninstall packages. @@ -45,34 +45,35 @@ class UninstallCommand(SessionCommandMixin, Command): self.parser.insert_option_group(0, self.cmd_opts) def run(self, options, args): - with self._build_session(options) as session: - reqs_to_uninstall = {} - for name in args: - req = install_req_from_line( - name, isolated=options.isolated_mode, - ) + session = self.get_default_session(options) + + reqs_to_uninstall = {} + for name in args: + req = install_req_from_line( + name, isolated=options.isolated_mode, + ) + if req.name: + reqs_to_uninstall[canonicalize_name(req.name)] = req + for filename in options.requirements: + for req in parse_requirements( + filename, + options=options, + session=session): if req.name: reqs_to_uninstall[canonicalize_name(req.name)] = req - for filename in options.requirements: - for req in parse_requirements( - filename, - options=options, - session=session): - if req.name: - reqs_to_uninstall[canonicalize_name(req.name)] = req - if not reqs_to_uninstall: - raise InstallationError( - 'You must give at least one requirement to %(name)s (see ' - '"pip help %(name)s")' % dict(name=self.name) - ) - - protect_pip_from_modification_on_windows( - modifying_pip="pip" in reqs_to_uninstall + if not reqs_to_uninstall: + raise InstallationError( + 'You must give at least one requirement to %(name)s (see ' + '"pip help %(name)s")' % dict(name=self.name) ) - for req in reqs_to_uninstall.values(): - uninstall_pathset = req.uninstall( - auto_confirm=options.yes, verbose=self.verbosity > 0, - ) - if uninstall_pathset: - uninstall_pathset.commit() + protect_pip_from_modification_on_windows( + modifying_pip="pip" in reqs_to_uninstall + ) + + for req in reqs_to_uninstall.values(): + uninstall_pathset = req.uninstall( + auto_confirm=options.yes, verbose=self.verbosity > 0, + ) + if uninstall_pathset: + uninstall_pathset.commit() diff --git a/src/pip/_internal/commands/wheel.py b/src/pip/_internal/commands/wheel.py index 635dedca6..a9b9507fe 100644 --- a/src/pip/_internal/commands/wheel.py +++ b/src/pip/_internal/commands/wheel.py @@ -115,61 +115,62 @@ class 
WheelCommand(RequirementCommand): options.src_dir = os.path.abspath(options.src_dir) - with self._build_session(options) as session: - finder = self._build_package_finder(options, session) - build_delete = (not (options.no_clean or options.build_dir)) - wheel_cache = WheelCache(options.cache_dir, options.format_control) + session = self.get_default_session(options) - with RequirementTracker() as req_tracker, TempDirectory( - options.build_dir, delete=build_delete, kind="wheel" - ) as directory: + finder = self._build_package_finder(options, session) + build_delete = (not (options.no_clean or options.build_dir)) + wheel_cache = WheelCache(options.cache_dir, options.format_control) - requirement_set = RequirementSet( - require_hashes=options.require_hashes, + with RequirementTracker() as req_tracker, TempDirectory( + options.build_dir, delete=build_delete, kind="wheel" + ) as directory: + + requirement_set = RequirementSet( + require_hashes=options.require_hashes, + ) + + try: + self.populate_requirement_set( + requirement_set, args, options, finder, session, + wheel_cache ) - try: - self.populate_requirement_set( - requirement_set, args, options, finder, session, - wheel_cache - ) + preparer = self.make_requirement_preparer( + temp_directory=directory, + options=options, + req_tracker=req_tracker, + wheel_download_dir=options.wheel_dir, + ) - preparer = self.make_requirement_preparer( - temp_directory=directory, - options=options, - req_tracker=req_tracker, - wheel_download_dir=options.wheel_dir, - ) + resolver = self.make_resolver( + preparer=preparer, + finder=finder, + session=session, + options=options, + wheel_cache=wheel_cache, + ignore_requires_python=options.ignore_requires_python, + use_pep517=options.use_pep517, + ) + resolver.resolve(requirement_set) - resolver = self.make_resolver( - preparer=preparer, - finder=finder, - session=session, - options=options, - wheel_cache=wheel_cache, - ignore_requires_python=options.ignore_requires_python, - use_pep517=options.use_pep517, + # build wheels + wb = WheelBuilder( + preparer, wheel_cache, + build_options=options.build_options or [], + global_options=options.global_options or [], + no_clean=options.no_clean, + ) + build_failures = wb.build( + requirement_set.requirements.values(), + ) + if len(build_failures) != 0: + raise CommandError( + "Failed to build one or more wheels" ) - resolver.resolve(requirement_set) - - # build wheels - wb = WheelBuilder( - preparer, wheel_cache, - build_options=options.build_options or [], - global_options=options.global_options or [], - no_clean=options.no_clean, - ) - build_failures = wb.build( - requirement_set.requirements.values(), - ) - if len(build_failures) != 0: - raise CommandError( - "Failed to build one or more wheels" - ) - except PreviousBuildDirError: - options.no_clean = True - raise - finally: - if not options.no_clean: - requirement_set.cleanup_files() - wheel_cache.cleanup() + except PreviousBuildDirError: + options.no_clean = True + raise + finally: + if not options.no_clean: + requirement_set.cleanup_files() + wheel_cache.cleanup() diff --git a/tests/functional/test_search.py b/tests/functional/test_search.py index 70421de30..fce6c5f81 100644 --- a/tests/functional/test_search.py +++ b/tests/functional/test_search.py @@ -113,8 +113,9 @@ def test_run_method_should_return_success_when_find_packages(): """ command = create_command('search') cmdline = "--index=https://pypi.org/pypi pip" - options, args = command.parse_args(cmdline.split()) - status = command.run(options, args) + with 
command.main_context(): + options, args = command.parse_args(cmdline.split()) + status = command.run(options, args) assert status == SUCCESS @@ -125,8 +126,9 @@ def test_run_method_should_return_no_matches_found_when_does_not_find_pkgs(): """ command = create_command('search') cmdline = "--index=https://pypi.org/pypi nonexistentpackage" - options, args = command.parse_args(cmdline.split()) - status = command.run(options, args) + with command.main_context(): + options, args = command.parse_args(cmdline.split()) + status = command.run(options, args) assert status == NO_MATCHES_FOUND
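
The test changes above follow from the assertion in command_context.py: SearchCommand.run() now calls get_default_session(), which calls enter_context(), and enter_context() asserts that the command's main context is active. A hedged sketch of the failure mode and the new calling pattern (the create_command import path is assumed; the final call reaches PyPI, just like the functional tests it mirrors).

    from pip._internal.commands import create_command

    command = create_command('search')
    options, args = command.parse_args(
        "--index=https://pypi.org/pypi pip".split())

    try:
        command.run(options, args)  # no main_context() active
    except AssertionError:
        pass  # enter_context() refuses to run outside main_context()

    with command.main_context():    # the pattern the updated tests use
        status = command.run(options, args)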