Merge branch 'master' into mypy/infrastructure

Pradyun S. Gedam 2017-08-03 03:26:59 +05:30
commit c365304f66
23 changed files with 1028 additions and 931 deletions

View File

@@ -6,13 +6,11 @@ Installation
Do I need to install pip?
-------------------------
pip is already installed if you're using Python 2 >=2.7.9 or Python 3 >=3.4
binaries downloaded from `python.org <https://www.python.org>`_, but you'll
need to :ref:`upgrade pip <Upgrading pip>`.
Additionally, pip will already be installed if you're working in a :ref:`Virtual
Environment <pypug:Creating and using Virtual Environments>` created by
:ref:`pypug:virtualenv` or :ref:`pyvenv <pypug:venv>`.
pip is already installed if you are using Python 2 >=2.7.9 or Python 3 >=3.4
downloaded from `python.org <https://www.python.org>`_ or if you are working
in a :ref:`Virtual Environment <pypug:Creating and using Virtual Environments>`
created by :ref:`pypug:virtualenv` or :ref:`pyvenv <pypug:venv>`.
Just make sure to :ref:`upgrade pip <Upgrading pip>`.
.. _`get-pip`:
@@ -21,25 +19,25 @@ Installing with get-pip.py
--------------------------
To install pip, securely download `get-pip.py
<https://bootstrap.pypa.io/get-pip.py>`_. [1]_
<https://bootstrap.pypa.io/get-pip.py>`_. [1]_::
Then run the following:
curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py
::
Inspect ``get-pip.py`` for any malevolence. Then run the following::
python get-pip.py
.. warning::
Be cautious if you're using a Python install that's managed by your operating
system or another package manager. get-pip.py does not coordinate with
Be cautious if you are using a Python install that is managed by your operating
system or another package manager. ``get-pip.py`` does not coordinate with
those tools, and may leave your system in an inconsistent state.
get-pip.py will also install :ref:`pypug:setuptools` [2]_ and :ref:`pypug:wheel`,
if they're not already. :ref:`pypug:setuptools` is required to install
``get-pip.py`` also installs :ref:`pypug:setuptools` [2]_ and :ref:`pypug:wheel`
if they are not already. :ref:`pypug:setuptools` is required to install
:term:`source distributions <pypug:Source Distribution (or "sdist")>`. Both are
required to be able to build a :ref:`Wheel cache` (which improves installation
required in order to build a :ref:`Wheel cache` (which improves installation
speed), although neither are required to install pre-built :term:`wheels
<pypug:Wheel>`.
@@ -55,14 +53,14 @@ get-pip.py options
.. option:: --no-setuptools
If set, don't attempt to install :ref:`pypug:setuptools`
If set, do not attempt to install :ref:`pypug:setuptools`
.. option:: --no-wheel
If set, don't attempt to install :ref:`pypug:wheel`
If set, do not attempt to install :ref:`pypug:wheel`
Additionally, ``get-pip.py`` supports using the :ref:`pip install options <pip
``get-pip.py`` allows :ref:`pip install options <pip
install Options>` and the :ref:`general options <General Options>`. Below are
some examples:
@@ -91,16 +89,12 @@ the `Python Packaging User Guide
Upgrading pip
-------------
On Linux or macOS:
::
On Linux or macOS::
pip install -U pip
On Windows [4]_:
::
On Windows [4]_::
python -m pip install -U pip

View File

@@ -1 +1 @@
Upgraded pkg_resources (via setuptools) to 35.0.2.
Upgraded pkg_resources (via setuptools) to 36.2.6.

View File

@@ -37,6 +37,7 @@ import email.parser
import tempfile
import textwrap
import itertools
import inspect
from pkgutil import get_importer
try:
@@ -67,6 +68,7 @@ try:
except ImportError:
importlib_machinery = None
from . import py31compat
from pip._vendor import appdirs
from pip._vendor import packaging
__import__('pip._vendor.packaging.version')
@@ -74,6 +76,7 @@ __import__('pip._vendor.packaging.specifiers')
__import__('pip._vendor.packaging.requirements')
__import__('pip._vendor.packaging.markers')
if (3, 0) < sys.version_info < (3, 3):
raise RuntimeError("Python 3.3 or later is required")
@@ -1550,7 +1553,7 @@ class EggProvider(NullProvider):
path = self.module_path
old = None
while path != old:
if _is_unpacked_egg(path):
if _is_egg_path(path):
self.egg_name = os.path.basename(path)
self.egg_info = os.path.join(path, 'EGG-INFO')
self.egg_root = path
@@ -1953,7 +1956,7 @@ def find_eggs_in_zip(importer, path_item, only=False):
# don't yield nested distros
return
for subitem in metadata.resource_listdir('/'):
if _is_unpacked_egg(subitem):
if _is_egg_path(subitem):
subpath = os.path.join(path_item, subitem)
for dist in find_eggs_in_zip(zipimport.zipimporter(subpath), subpath):
yield dist
@@ -2030,7 +2033,7 @@ def find_on_path(importer, path_item, only=False):
yield Distribution.from_location(
path_item, entry, metadata, precedence=DEVELOP_DIST
)
elif not only and _is_unpacked_egg(entry):
elif not only and _is_egg_path(entry):
dists = find_distributions(os.path.join(path_item, entry))
for dist in dists:
yield dist
@@ -2218,12 +2221,22 @@ def _normalize_cached(filename, _cache={}):
return result
def _is_egg_path(path):
"""
Determine if given path appears to be an egg.
"""
return (
path.lower().endswith('.egg')
)
def _is_unpacked_egg(path):
"""
Determine if given path appears to be an unpacked egg.
"""
return (
path.lower().endswith('.egg')
_is_egg_path(path) and
os.path.isfile(os.path.join(path, 'EGG-INFO', 'PKG-INFO'))
)
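
In short, the refactor splits the cheap lexical check (_is_egg_path) out of the stricter on-disk check (_is_unpacked_egg). A minimal sketch of the difference, using a hypothetical path:

    import os

    egg = '/site-packages/example-1.0-py2.7.egg'  # hypothetical path

    # _is_egg_path: purely lexical -- true for zipped egg files and unpacked
    # egg directories alike
    is_egg = egg.lower().endswith('.egg')

    # _is_unpacked_egg: also requires the metadata file on disk, so it is only
    # true for an unpacked egg directory containing EGG-INFO/PKG-INFO
    is_unpacked = is_egg and os.path.isfile(
        os.path.join(egg, 'EGG-INFO', 'PKG-INFO'))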
@@ -2937,6 +2950,7 @@ class Requirement(packaging.requirements.Requirement):
return req
<<<<<<< HEAD
def _get_mro(cls):
"""Get an mro for a type or classic class"""
if not isinstance(cls, type):
@@ -2946,11 +2960,32 @@ def _get_mro(cls):
return new_cls.__mro__[1:]
return cls.__mro__
||||||| merged common ancestors
def _get_mro(cls):
"""Get an mro for a type or classic class"""
if not isinstance(cls, type):
class cls(cls, object):
pass
return cls.__mro__[1:]
return cls.__mro__
=======
def _always_object(classes):
"""
Ensure object appears in the mro even
for old-style classes.
"""
if object not in classes:
return classes + (object,)
return classes
>>>>>>> master
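
The master side of the conflict replaces the hand-rolled _get_mro with inspect.getmro plus _always_object, which guarantees object appears in the lookup chain even for Python 2 old-style classes (under Python 3 it is effectively a no-op). A small self-contained sketch:

    import inspect

    def _always_object(classes):
        # append `object` for old-style classes whose MRO omits it
        if object not in classes:
            return classes + (object,)
        return classes

    class Widget(object):
        pass

    ob = Widget()
    mro = _always_object(inspect.getmro(getattr(ob, '__class__', type(ob))))
    # mro == (Widget, object); an adapter registry keyed by type is then
    # probed in exactly this order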
def _find_adapter(registry, ob):
"""Return an adapter factory for `ob` from `registry`"""
for t in _get_mro(getattr(ob, '__class__', type(ob))):
types = _always_object(inspect.getmro(getattr(ob, '__class__', type(ob))))
for t in types:
if t in registry:
return registry[t]
@@ -2958,8 +2993,7 @@ def _find_adapter(registry, ob):
def ensure_directory(path):
"""Ensure that the parent directory of `path` exists"""
dirname = os.path.dirname(path)
if not os.path.isdir(dirname):
os.makedirs(dirname)
py31compat.makedirs(dirname, exist_ok=True)
def _bypass_ensure_directory(path):

View File

@@ -0,0 +1,22 @@
import os
import errno
import sys
def _makedirs_31(path, exist_ok=False):
try:
os.makedirs(path)
except OSError as exc:
if not exist_ok or exc.errno != errno.EEXIST:
raise
# rely on compatibility behavior until mode considerations
# and exists_ok considerations are disentangled.
# See https://github.com/pypa/setuptools/pull/1083#issuecomment-315168663
needs_makedirs = (
sys.version_info < (3, 2, 5) or
(3, 3) <= sys.version_info < (3, 3, 6) or
(3, 4) <= sys.version_info < (3, 4, 1)
)
makedirs = _makedirs_31 if needs_makedirs else os.makedirs
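
The effect of the shim is visible at the ensure_directory change above: the racy isdir-then-makedirs dance becomes a single call. A minimal usage sketch, assuming the vendored import path and an illustrative directory name:

    from pip._vendor.pkg_resources import py31compat

    py31compat.makedirs('/tmp/example/nested', exist_ok=True)
    # calling again is a no-op rather than an OSError(EEXIST)
    py31compat.makedirs('/tmp/example/nested', exist_ok=True)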

View File

@@ -1,3 +1,3 @@
from .core import TomlError
from .parser import load, loads
from .writer import dump, dumps

View File

@@ -1,13 +1,13 @@
class TomlError(RuntimeError):
def __init__(self, message, line, col, filename):
RuntimeError.__init__(self, message, line, col, filename)
self.message = message
self.line = line
self.col = col
self.filename = filename
def __str__(self):
return '{}({}, {}): {}'.format(self.filename, self.line, self.col, self.message)
def __repr__(self):
return 'TomlError({!r}, {!r}, {!r}, {!r})'.format(self.message, self.line, self.col, self.filename)

View File

@@ -1,366 +1,366 @@
import string, re, sys, datetime
from .core import TomlError
if sys.version_info[0] == 2:
_chr = unichr
else:
_chr = chr
def load(fin, translate=lambda t, x, v: v):
return loads(fin.read(), translate=translate, filename=getattr(fin, 'name', repr(fin)))
def loads(s, filename='<string>', translate=lambda t, x, v: v):
if isinstance(s, bytes):
s = s.decode('utf-8')
s = s.replace('\r\n', '\n')
root = {}
tables = {}
scope = root
src = _Source(s, filename=filename)
ast = _p_toml(src)
def error(msg):
raise TomlError(msg, pos[0], pos[1], filename)
def process_value(v):
kind, text, value, pos = v
if kind == 'str' and value.startswith('\n'):
value = value[1:]
if kind == 'array':
if value and any(k != value[0][0] for k, t, v, p in value[1:]):
error('array-type-mismatch')
value = [process_value(item) for item in value]
elif kind == 'table':
value = dict([(k, process_value(value[k])) for k in value])
return translate(kind, text, value)
for kind, value, pos in ast:
if kind == 'kv':
k, v = value
if k in scope:
error('duplicate_keys. Key "{0}" was used more than once.'.format(k))
scope[k] = process_value(v)
else:
is_table_array = (kind == 'table_array')
cur = tables
for name in value[:-1]:
if isinstance(cur.get(name), list):
d, cur = cur[name][-1]
else:
d, cur = cur.setdefault(name, (None, {}))
scope = {}
name = value[-1]
if name not in cur:
if is_table_array:
cur[name] = [(scope, {})]
else:
cur[name] = (scope, {})
elif isinstance(cur[name], list):
if not is_table_array:
error('table_type_mismatch')
cur[name].append((scope, {}))
else:
if is_table_array:
error('table_type_mismatch')
old_scope, next_table = cur[name]
if old_scope is not None:
error('duplicate_tables')
cur[name] = (scope, next_table)
def merge_tables(scope, tables):
if scope is None:
scope = {}
for k in tables:
if k in scope:
error('key_table_conflict')
v = tables[k]
if isinstance(v, list):
scope[k] = [merge_tables(sc, tbl) for sc, tbl in v]
else:
scope[k] = merge_tables(v[0], v[1])
return scope
return merge_tables(root, tables)
class _Source:
def __init__(self, s, filename=None):
self.s = s
self._pos = (1, 1)
self._last = None
self._filename = filename
self.backtrack_stack = []
def last(self):
return self._last
def pos(self):
return self._pos
def fail(self):
return self._expect(None)
def consume_dot(self):
if self.s:
self._last = self.s[0]
self.s = self.s[1:]
self._advance(self._last)
return self._last
return None
def expect_dot(self):
return self._expect(self.consume_dot())
def consume_eof(self):
if not self.s:
self._last = ''
return True
return False
def expect_eof(self):
return self._expect(self.consume_eof())
def consume(self, s):
if self.s.startswith(s):
self.s = self.s[len(s):]
self._last = s
self._advance(s)
return True
return False
def expect(self, s):
return self._expect(self.consume(s))
def consume_re(self, re):
m = re.match(self.s)
if m:
self.s = self.s[len(m.group(0)):]
self._last = m
self._advance(m.group(0))
return m
return None
def expect_re(self, re):
return self._expect(self.consume_re(re))
def __enter__(self):
self.backtrack_stack.append((self.s, self._pos))
def __exit__(self, type, value, traceback):
if type is None:
self.backtrack_stack.pop()
else:
self.s, self._pos = self.backtrack_stack.pop()
return type == TomlError
def commit(self):
self.backtrack_stack[-1] = (self.s, self._pos)
def _expect(self, r):
if not r:
raise TomlError('msg', self._pos[0], self._pos[1], self._filename)
return r
def _advance(self, s):
suffix_pos = s.rfind('\n')
if suffix_pos == -1:
self._pos = (self._pos[0], self._pos[1] + len(s))
else:
self._pos = (self._pos[0] + s.count('\n'), len(s) - suffix_pos)
_ews_re = re.compile(r'(?:[ \t]|#[^\n]*\n|#[^\n]*\Z|\n)*')
def _p_ews(s):
s.expect_re(_ews_re)
_ws_re = re.compile(r'[ \t]*')
def _p_ws(s):
s.expect_re(_ws_re)
_escapes = { 'b': '\b', 'n': '\n', 'r': '\r', 't': '\t', '"': '"', '\'': '\'',
'\\': '\\', '/': '/', 'f': '\f' }
_basicstr_re = re.compile(r'[^"\\\000-\037]*')
_short_uni_re = re.compile(r'u([0-9a-fA-F]{4})')
_long_uni_re = re.compile(r'U([0-9a-fA-F]{8})')
_escapes_re = re.compile('[bnrt"\'\\\\/f]')
_newline_esc_re = re.compile('\n[ \t\n]*')
def _p_basicstr_content(s, content=_basicstr_re):
res = []
while True:
res.append(s.expect_re(content).group(0))
if not s.consume('\\'):
break
if s.consume_re(_newline_esc_re):
pass
elif s.consume_re(_short_uni_re) or s.consume_re(_long_uni_re):
res.append(_chr(int(s.last().group(1), 16)))
else:
s.expect_re(_escapes_re)
res.append(_escapes[s.last().group(0)])
return ''.join(res)
_key_re = re.compile(r'[0-9a-zA-Z-_]+')
def _p_key(s):
with s:
s.expect('"')
r = _p_basicstr_content(s, _basicstr_re)
s.expect('"')
return r
return s.expect_re(_key_re).group(0)
_float_re = re.compile(r'[+-]?(?:0|[1-9](?:_?\d)*)(?:\.\d(?:_?\d)*)?(?:[eE][+-]?(?:\d(?:_?\d)*))?')
_datetime_re = re.compile(r'(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})(\.\d+)?(?:Z|([+-]\d{2}):(\d{2}))')
_basicstr_ml_re = re.compile(r'(?:(?:|"|"")[^"\\\000-\011\013-\037])*')
_litstr_re = re.compile(r"[^'\000-\037]*")
_litstr_ml_re = re.compile(r"(?:(?:|'|'')(?:[^'\000-\011\013-\037]))*")
def _p_value(s):
pos = s.pos()
if s.consume('true'):
return 'bool', s.last(), True, pos
if s.consume('false'):
return 'bool', s.last(), False, pos
if s.consume('"'):
if s.consume('""'):
r = _p_basicstr_content(s, _basicstr_ml_re)
s.expect('"""')
else:
r = _p_basicstr_content(s, _basicstr_re)
s.expect('"')
return 'str', r, r, pos
if s.consume('\''):
if s.consume('\'\''):
r = s.expect_re(_litstr_ml_re).group(0)
s.expect('\'\'\'')
else:
r = s.expect_re(_litstr_re).group(0)
s.expect('\'')
return 'str', r, r, pos
if s.consume_re(_datetime_re):
m = s.last()
s0 = m.group(0)
r = map(int, m.groups()[:6])
if m.group(7):
micro = float(m.group(7))
else:
micro = 0
if m.group(8):
g = int(m.group(8), 10) * 60 + int(m.group(9), 10)
tz = _TimeZone(datetime.timedelta(0, g * 60))
else:
tz = _TimeZone(datetime.timedelta(0, 0))
y, m, d, H, M, S = r
dt = datetime.datetime(y, m, d, H, M, S, int(micro * 1000000), tz)
return 'datetime', s0, dt, pos
if s.consume_re(_float_re):
m = s.last().group(0)
r = m.replace('_','')
if '.' in m or 'e' in m or 'E' in m:
return 'float', m, float(r), pos
else:
return 'int', m, int(r, 10), pos
if s.consume('['):
items = []
with s:
while True:
_p_ews(s)
items.append(_p_value(s))
s.commit()
_p_ews(s)
s.expect(',')
s.commit()
_p_ews(s)
s.expect(']')
return 'array', None, items, pos
if s.consume('{'):
_p_ws(s)
items = {}
if not s.consume('}'):
k = _p_key(s)
_p_ws(s)
s.expect('=')
_p_ws(s)
items[k] = _p_value(s)
_p_ws(s)
while s.consume(','):
_p_ws(s)
k = _p_key(s)
_p_ws(s)
s.expect('=')
_p_ws(s)
items[k] = _p_value(s)
_p_ws(s)
s.expect('}')
return 'table', None, items, pos
s.fail()
def _p_stmt(s):
pos = s.pos()
if s.consume('['):
is_array = s.consume('[')
_p_ws(s)
keys = [_p_key(s)]
_p_ws(s)
while s.consume('.'):
_p_ws(s)
keys.append(_p_key(s))
_p_ws(s)
s.expect(']')
if is_array:
s.expect(']')
return 'table_array' if is_array else 'table', keys, pos
key = _p_key(s)
_p_ws(s)
s.expect('=')
_p_ws(s)
value = _p_value(s)
return 'kv', (key, value), pos
_stmtsep_re = re.compile(r'(?:[ \t]*(?:#[^\n]*)?\n)+[ \t]*')
def _p_toml(s):
stmts = []
_p_ews(s)
with s:
stmts.append(_p_stmt(s))
while True:
s.commit()
s.expect_re(_stmtsep_re)
stmts.append(_p_stmt(s))
_p_ews(s)
s.expect_eof()
return stmts
class _TimeZone(datetime.tzinfo):
def __init__(self, offset):
self._offset = offset
def utcoffset(self, dt):
return self._offset
def dst(self, dt):
return None
def tzname(self, dt):
m = self._offset.total_seconds() // 60
if m < 0:
res = '-'
m = -m
else:
res = '+'
h = m // 60
m = m - h * 60
return '{}{:.02}{:.02}'.format(res, h, m)
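
For reference, the parser's public surface is the load/loads pair re-exported from __init__.py above; a minimal round trip with an illustrative document:

    from pip._vendor import pytoml

    doc = pytoml.loads('[build-system]\nrequires = ["setuptools", "wheel"]\n')
    assert doc == {'build-system': {'requires': ['setuptools', 'wheel']}}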

View File

@@ -1,121 +1,121 @@
from __future__ import unicode_literals
import io, datetime, sys
if sys.version_info[0] == 3:
long = int
unicode = str
def dumps(obj, sort_keys=False):
fout = io.StringIO()
dump(obj, fout, sort_keys=sort_keys)
return fout.getvalue()
_escapes = {'\n': 'n', '\r': 'r', '\\': '\\', '\t': 't', '\b': 'b', '\f': 'f', '"': '"'}
def _escape_string(s):
res = []
start = 0
def flush():
if start != i:
res.append(s[start:i])
return i + 1
i = 0
while i < len(s):
c = s[i]
if c in '"\\\n\r\t\b\f':
start = flush()
res.append('\\' + _escapes[c])
elif ord(c) < 0x20:
start = flush()
res.append('\\u%04x' % ord(c))
i += 1
flush()
return '"' + ''.join(res) + '"'
def _escape_id(s):
if any(not c.isalnum() and c not in '-_' for c in s):
return _escape_string(s)
return s
def _format_list(v):
return '[{0}]'.format(', '.join(_format_value(obj) for obj in v))
# Formula from:
# https://docs.python.org/2/library/datetime.html#datetime.timedelta.total_seconds
# Once support for py26 is dropped, this can be replaced by td.total_seconds()
def _total_seconds(td):
return ((td.microseconds
+ (td.seconds + td.days * 24 * 3600) * 10**6) / 10.0**6)
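# Worked example of the backport formula above (illustrative values):
#   td = datetime.timedelta(days=1, seconds=3, microseconds=500000)
#   (500000 + (3 + 1 * 24 * 3600) * 10**6) / 10.0**6 == 86403.5
#   ...which matches td.total_seconds() on interpreters that have it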
def _format_value(v):
if isinstance(v, bool):
return 'true' if v else 'false'
if isinstance(v, int) or isinstance(v, long):
return unicode(v)
if isinstance(v, float):
return repr(v)
elif isinstance(v, unicode) or isinstance(v, bytes):
return _escape_string(v)
elif isinstance(v, datetime.datetime):
offs = v.utcoffset()
offs = _total_seconds(offs) // 60 if offs is not None else 0
if offs == 0:
suffix = 'Z'
else:
if offs > 0:
suffix = '+'
else:
suffix = '-'
offs = -offs
suffix = '{0}{1:.02}{2:.02}'.format(suffix, offs // 60, offs % 60)
if v.microsecond:
return v.strftime('%Y-%m-%dT%H:%M:%S.%f') + suffix
else:
return v.strftime('%Y-%m-%dT%H:%M:%S') + suffix
elif isinstance(v, list):
return _format_list(v)
else:
raise RuntimeError(v)
def dump(obj, fout, sort_keys=False):
tables = [((), obj, False)]
while tables:
name, table, is_array = tables.pop()
if name:
section_name = '.'.join(_escape_id(c) for c in name)
if is_array:
fout.write('[[{0}]]\n'.format(section_name))
else:
fout.write('[{0}]\n'.format(section_name))
table_keys = sorted(table.keys()) if sort_keys else table.keys()
new_tables = []
for k in table_keys:
v = table[k]
if isinstance(v, dict):
new_tables.append((name + (k,), v, False))
elif isinstance(v, list) and v and all(isinstance(o, dict) for o in v):
new_tables.extend((name + (k,), d, True) for d in v)
elif v is None:
# based on mojombo's comment: https://github.com/toml-lang/toml/issues/146#issuecomment-25019344
fout.write(
'#{} = null # To use: uncomment and replace null with value\n'.format(_escape_id(k)))
else:
fout.write('{0} = {1}\n'.format(_escape_id(k), _format_value(v)))
tables.extend(reversed(new_tables))
if tables:
fout.write('\n')
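
And the writer side, matching the reader above (values illustrative; note the writer emits a separating blank line before each table header, including the first):

    from pip._vendor import pytoml

    text = pytoml.dumps({'server': {'host': 'localhost', 'port': 8080}},
                        sort_keys=True)
    # text == '\n[server]\nhost = "localhost"\nport = 8080\n'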

View File

@@ -14,5 +14,5 @@ pyparsing==2.2.0
pytoml==0.1.12
retrying==1.3.3
requests==2.14.2
setuptools==35.0.2
setuptools==36.2.6
webencodings==0.5.1

View File

@@ -2,6 +2,7 @@
"""
import errno
import hashlib
import logging
import os
@@ -10,52 +11,130 @@ from pip._vendor.packaging.utils import canonicalize_name
import pip.index
from pip.compat import expanduser
from pip.download import path_to_url
from pip.utils.cache import get_cache_path_for_link
from pip.wheel import InvalidWheelFilename, Wheel
logger = logging.getLogger(__name__)
class WheelCache(object):
"""A cache of wheels for future installs."""
class Cache(object):
"""An abstract class - provides cache directories for data from links
def __init__(self, cache_dir, format_control):
"""Create a wheel cache.
:param cache_dir: The root of the cache.
:param format_control: A pip.index.FormatControl object to limit
binaries being read from the cache.
:param allowed_formats: which formats of files the cache should store.
('binary' and 'source' are the only allowed values)
"""
def __init__(self, cache_dir, format_control, allowed_formats):
super(Cache, self).__init__()
self.cache_dir = expanduser(cache_dir) if cache_dir else None
self.format_control = format_control
self.allowed_formats = allowed_formats
_valid_formats = {"source", "binary"}
assert self.allowed_formats.union(_valid_formats) == _valid_formats
def _get_cache_path_parts(self, link):
"""Get parts of part that must be os.path.joined with cache_dir
"""
self._cache_dir = expanduser(cache_dir) if cache_dir else None
self._format_control = format_control
def cached_wheel(self, link, package_name):
not_cached = (
not self._cache_dir or
not link or
link.is_wheel or
not link.is_artifact or
not package_name
# We want to generate a URL to use as our cache key; we don't want to
# just re-use the link's URL because it might have other items in the
# fragment, and we don't care about those.
key_parts = [link.url_without_fragment]
if link.hash_name is not None and link.hash is not None:
key_parts.append("=".join([link.hash_name, link.hash]))
key_url = "#".join(key_parts)
# Encode our key URL with sha224; we use it because it has security
# properties similar to sha256, but a shorter total output (and is thus
# less secure). However, the difference doesn't matter much for our use
# case here.
hashed = hashlib.sha224(key_url.encode()).hexdigest()
# We nest the directories to avoid having a ton of top-level directories,
# where we might run out of subdirectories on some filesystems.
parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]
return parts
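
A self-contained sketch of the key derivation _get_cache_path_parts performs, with an illustrative URL and cache root:

    import hashlib
    import os.path

    key_url = 'https://files.example.org/packages/demo-1.0.tar.gz'
    hashed = hashlib.sha224(key_url.encode()).hexdigest()  # 56 hex chars
    parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]
    path = os.path.join('/home/user/.cache/pip', 'wheels', *parts)
    # e.g. .../wheels/aa/bb/cc/<remaining 50 hex chars> -- the two-character
    # fan-out keeps any single directory from accumulating too many entries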
def _get_candidates(self, link, package_name):
can_not_cache = (
not self.cache_dir or
not package_name or
not link
)
if not_cached:
return link
if can_not_cache:
return []
canonical_name = canonicalize_name(package_name)
formats = pip.index.fmt_ctl_formats(
self._format_control, canonical_name
self.format_control, canonical_name
)
if "binary" not in formats:
return link
root = get_cache_path_for_link(self._cache_dir, link)
if not self.allowed_formats.intersection(formats):
return []
root = self.get_path_for_link(link)
try:
wheel_names = os.listdir(root)
return os.listdir(root)
except OSError as err:
if err.errno in {errno.ENOENT, errno.ENOTDIR}:
return link
return []
raise
def get_path_for_link(self, link):
"""Return a directory to store cached items in for link.
"""
raise NotImplementedError()
def get(self, link, package_name):
"""Returns a link to a cached item if it exists, otherwise returns the
passed link.
"""
raise NotImplementedError()
def _link_for_candidate(self, link, candidate):
root = self.get_path_for_link(link)
path = os.path.join(root, candidate)
return pip.index.Link(path_to_url(path))
class WheelCache(Cache):
"""A cache of wheels for future installs.
"""
def __init__(self, cache_dir, format_control):
super(WheelCache, self).__init__(cache_dir, format_control, {"binary"})
def get_path_for_link(self, link):
"""Return a directory to store cached wheels for link
Because there are M wheels for any one sdist, we provide a directory
to cache them in, and then consult that directory when looking up
cache hits.
We only insert things into the cache if they have plausible version
numbers, so that we don't contaminate the cache with things that were
not unique. E.g. ./package might have dozens of installs done for it
and build a version of 0.0...and if we built and cached a wheel, we'd
end up using the same wheel even if the source has been edited.
:param link: The link of the sdist for which this will cache wheels.
"""
parts = self._get_cache_path_parts(link)
# Inside of the base location for cached wheels, expand our parts and
# join them all together.
return os.path.join(self.cache_dir, "wheels", *parts)
def get(self, link, package_name):
candidates = []
for wheel_name in wheel_names:
for wheel_name in self._get_candidates(link, package_name):
try:
wheel = Wheel(wheel_name)
except InvalidWheelFilename:
@@ -64,8 +143,8 @@ class WheelCache(object):
# Built for a different python/arch/etc
continue
candidates.append((wheel.support_index_min(), wheel_name))
if not candidates:
return link
candidates.sort()
path = os.path.join(root, candidates[0][1])
return pip.index.Link(path_to_url(path))
return self._link_for_candidate(link, min(candidates)[1])
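
Putting the new API together: call sites now go through get() instead of cached_wheel() (see the InstallRequirement change below). A hedged sketch, assuming this module is importable as pip.cache and using an illustrative sdist URL and cache root:

    import pip.index
    from pip.cache import WheelCache

    # an unrestricted FormatControl: no --no-binary / --only-binary entries
    format_control = pip.index.FormatControl(set(), set())
    cache = WheelCache('/home/user/.cache/pip', format_control)

    # illustrative Link for a source distribution
    sdist_link = pip.index.Link(
        'https://files.example.org/packages/demo-1.0.tar.gz')

    wheel_dir = cache.get_path_for_link(sdist_link)  # where built wheels go
    resolved = cache.get(sdist_link, 'demo')  # cached wheel Link on a hit,
                                              # sdist_link itself on a miss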

View File

@@ -149,195 +149,219 @@ class RequirementPreparer(object):
return False
def prepare_requirement(self, req, resolver):
# TODO: Breakup into smaller functions
# TODO: Add a nice docstring
"""Prepare a requirement for installation
Returns an AbstractDist that can be used to install the package
"""
# TODO: Remove circular dependency on resolver
assert resolver.require_hashes is not None, (
"require_hashes should have been set in Resolver.resolve()"
)
if req.editable:
logger.info('Obtaining %s', req)
return self._prepare_editable_requirement(req, resolver)
# satisfied_by is only evaluated by calling _check_skip_installed,
# so it must be None here.
assert req.satisfied_by is None
if not resolver.ignore_installed:
skip_reason = resolver._check_skip_installed(req)
if req.satisfied_by:
return self._prepare_installed_requirement(
req, resolver, skip_reason
)
return self._prepare_linked_requirement(req, resolver)
def _prepare_linked_requirement(self, req, resolver):
"""Prepare a requirement that would be obtained from req.link
"""
# TODO: Breakup into smaller functions
if req.link and req.link.scheme == 'file':
path = url_to_path(req.link.url)
logger.info('Processing %s', display_path(path))
else:
# satisfied_by is only evaluated by calling _check_skip_installed,
# so it must be None here.
assert req.satisfied_by is None
if not resolver.ignore_installed:
skip_reason = resolver._check_skip_installed(req)
if req.satisfied_by:
assert skip_reason is not None, (
'_check_skip_installed returned None but '
'req.satisfied_by is set to %r'
% (req.satisfied_by,))
logger.info(
'Requirement %s: %s (%s)', skip_reason,
req,
req.satisfied_by.version)
else:
if (req.link and
req.link.scheme == 'file'):
path = url_to_path(req.link.url)
logger.info('Processing %s', display_path(path))
else:
logger.info('Collecting %s', req)
assert resolver.require_hashes is not None, \
"This should have been set in resolve()"
logger.info('Collecting %s', req)
with indent_log():
# ################################ #
# # vcs update or unpack archive # #
# ################################ #
if req.editable:
if resolver.require_hashes:
raise InstallationError(
'The editable requirement %s cannot be installed when '
'requiring hashes, because there is no single file to '
'hash.' % req)
req.ensure_has_source_dir(self.src_dir)
req.update_editable(not self._download_should_save)
abstract_dist = make_abstract_dist(req)
abstract_dist.prep_for_dist()
if self._download_should_save:
req.archive(self.download_dir)
req.check_if_exists()
elif req.satisfied_by:
if resolver.require_hashes:
logger.debug(
'Since it is already installed, we are trusting this '
'package without checking its hash. To ensure a '
'completely repeatable environment, install into an '
'empty virtualenv.')
abstract_dist = Installed(req)
else:
# @@ if filesystem packages are not marked
# editable in a req, a non deterministic error
# occurs when the script attempts to unpack the
# build directory
req.ensure_has_source_dir(self.build_dir)
# If a checkout exists, it's unwise to keep going. version
# inconsistencies are logged later, but do not fail the
# installation.
# FIXME: this won't upgrade when there's an existing
# package unpacked in `req.source_dir`
if os.path.exists(
os.path.join(req.source_dir, 'setup.py')):
raise PreviousBuildDirError(
"pip can't proceed with requirements '%s' due to a"
" pre-existing build directory (%s). This is "
"likely due to a previous installation that failed"
". pip is being responsible and not assuming it "
"can delete this. Please delete it and try again."
% (req, req.source_dir)
)
req.populate_link(
resolver.finder,
resolver._is_upgrade_allowed(req),
resolver.require_hashes
# @@ if filesystem packages are not marked
# editable in a req, a non deterministic error
# occurs when the script attempts to unpack the
# build directory
req.ensure_has_source_dir(self.build_dir)
# If a checkout exists, it's unwise to keep going. version
# inconsistencies are logged later, but do not fail the
# installation.
# FIXME: this won't upgrade when there's an existing
# package unpacked in `req.source_dir`
if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
raise PreviousBuildDirError(
"pip can't proceed with requirements '%s' due to a"
" pre-existing build directory (%s). This is "
"likely due to a previous installation that failed"
". pip is being responsible and not assuming it "
"can delete this. Please delete it and try again."
% (req, req.source_dir)
)
# We can't hit this spot and have populate_link return None.
# req.satisfied_by is None here (because we're
# guarded) and upgrade has no impact except when satisfied_by
# is not None.
# Then inside find_requirement existing_applicable -> False
# If no new versions are found, DistributionNotFound is raised,
# otherwise a result is guaranteed.
assert req.link
link = req.link
req.populate_link(
resolver.finder,
resolver._is_upgrade_allowed(req),
resolver.require_hashes
)
# We can't hit this spot and have populate_link return None.
# req.satisfied_by is None here (because we're
# guarded) and upgrade has no impact except when satisfied_by
# is not None.
# Then inside find_requirement existing_applicable -> False
# If no new versions are found, DistributionNotFound is raised,
# otherwise a result is guaranteed.
assert req.link
link = req.link
# Now that we have the real link, we can tell what kind of
# requirements we have and raise some more informative errors
# than otherwise. (For example, we can raise VcsHashUnsupported
# for a VCS URL rather than HashMissing.)
if resolver.require_hashes:
# We could check these first 2 conditions inside
# unpack_url and save repetition of conditions, but then
# we would report less-useful error messages for
# unhashable requirements, complaining that there's no
# hash provided.
if is_vcs_url(link):
raise VcsHashUnsupported()
elif is_file_url(link) and is_dir_url(link):
raise DirectoryUrlHashUnsupported()
if (not req.original_link and
not req.is_pinned):
# Unpinned packages are asking for trouble when a new
# version is uploaded. This isn't a security check, but
# it saves users a surprising hash mismatch in the
# future.
#
# file:/// URLs aren't pinnable, so don't complain
# about them not being pinned.
raise HashUnpinned()
hashes = req.hashes(
trust_internet=not resolver.require_hashes)
if resolver.require_hashes and not hashes:
# Known-good hashes are missing for this requirement, so
# shim it with a facade object that will provoke hash
# computation and then raise a HashMissing exception
# showing the user what the hash should be.
hashes = MissingHashes()
# Now that we have the real link, we can tell what kind of
# requirements we have and raise some more informative errors
# than otherwise. (For example, we can raise VcsHashUnsupported
# for a VCS URL rather than HashMissing.)
if resolver.require_hashes:
# We could check these first 2 conditions inside
# unpack_url and save repetition of conditions, but then
# we would report less-useful error messages for
# unhashable requirements, complaining that there's no
# hash provided.
if is_vcs_url(link):
raise VcsHashUnsupported()
elif is_file_url(link) and is_dir_url(link):
raise DirectoryUrlHashUnsupported()
if not req.original_link and not req.is_pinned:
# Unpinned packages are asking for trouble when a new
# version is uploaded. This isn't a security check, but
# it saves users a surprising hash mismatch in the
# future.
#
# file:/// URLs aren't pinnable, so don't complain
# about them not being pinned.
raise HashUnpinned()
hashes = req.hashes(trust_internet=not resolver.require_hashes)
if resolver.require_hashes and not hashes:
# Known-good hashes are missing for this requirement, so
# shim it with a facade object that will provoke hash
# computation and then raise a HashMissing exception
# showing the user what the hash should be.
hashes = MissingHashes()
try:
download_dir = self.download_dir
# We always delete unpacked sdists after pip ran.
autodelete_unpacked = True
if req.link.is_wheel \
and self.wheel_download_dir:
# when doing 'pip wheel' we download wheels to a
# dedicated dir.
download_dir = self.wheel_download_dir
if req.link.is_wheel:
if download_dir:
# When downloading, we only unpack wheels to get
# metadata.
autodelete_unpacked = True
else:
# When installing a wheel, we use the unpacked
# wheel.
autodelete_unpacked = False
unpack_url(
req.link, req.source_dir,
download_dir, autodelete_unpacked,
session=resolver.session, hashes=hashes,
progress_bar=self.progress_bar)
except requests.HTTPError as exc:
logger.critical(
'Could not install requirement %s because '
'of error %s',
req,
exc,
)
raise InstallationError(
'Could not install requirement %s because '
'of HTTP error %s for URL %s' %
(req, exc, req.link)
)
abstract_dist = make_abstract_dist(req)
abstract_dist.prep_for_dist()
if self._download_should_save:
# Make a .zip of the source_dir we already created.
if req.link.scheme in vcs.all_schemes:
req.archive(self.download_dir)
# req.req is only avail after unpack for URL
# pkgs repeat check_if_exists to uninstall-on-upgrade
# (#14)
if not resolver.ignore_installed:
req.check_if_exists()
if req.satisfied_by:
should_modify = (
resolver.upgrade_strategy != "to-satisfy-only" or
resolver.ignore_installed
)
if should_modify:
# don't uninstall conflict if user install and
# conflict is not user install
if not (resolver.use_user_site and not
dist_in_usersite(req.satisfied_by)):
req.conflicts_with = \
req.satisfied_by
req.satisfied_by = None
try:
download_dir = self.download_dir
# We always delete unpacked sdists after pip ran.
autodelete_unpacked = True
if req.link.is_wheel and self.wheel_download_dir:
# when doing 'pip wheel' we download wheels to a
# dedicated dir.
download_dir = self.wheel_download_dir
if req.link.is_wheel:
if download_dir:
# When downloading, we only unpack wheels to get
# metadata.
autodelete_unpacked = True
else:
logger.info(
'Requirement already satisfied (use '
'--upgrade to upgrade): %s',
req,
)
# When installing a wheel, we use the unpacked
# wheel.
autodelete_unpacked = False
unpack_url(
req.link, req.source_dir,
download_dir, autodelete_unpacked,
session=resolver.session, hashes=hashes,
progress_bar=self.progress_bar
)
except requests.HTTPError as exc:
logger.critical(
'Could not install requirement %s because of error %s',
req,
exc,
)
raise InstallationError(
'Could not install requirement %s because of HTTP '
'error %s for URL %s' %
(req, exc, req.link)
)
abstract_dist = make_abstract_dist(req)
abstract_dist.prep_for_dist()
if self._download_should_save:
# Make a .zip of the source_dir we already created.
if req.link.scheme in vcs.all_schemes:
req.archive(self.download_dir)
# req.req is only avail after unpack for URL
# pkgs repeat check_if_exists to uninstall-on-upgrade
# (#14)
if not resolver.ignore_installed:
req.check_if_exists()
if req.satisfied_by:
should_modify = (
resolver.upgrade_strategy != "to-satisfy-only" or
resolver.ignore_installed
)
if should_modify:
# don't uninstall conflict if user install and
# conflict is not user install
if not (resolver.use_user_site and
not dist_in_usersite(req.satisfied_by)):
req.conflicts_with = req.satisfied_by
req.satisfied_by = None
else:
logger.info(
'Requirement already satisfied (use '
'--upgrade to upgrade): %s',
req,
)
return abstract_dist
def _prepare_editable_requirement(self, req, resolver):
"""Prepare an editable requirement
"""
assert req.editable, "cannot prepare a non-editable req as editable"
logger.info('Obtaining %s', req)
with indent_log():
if resolver.require_hashes:
raise InstallationError(
'The editable requirement %s cannot be installed when '
'requiring hashes, because there is no single file to '
'hash.' % req
)
req.ensure_has_source_dir(self.src_dir)
req.update_editable(not self._download_should_save)
abstract_dist = make_abstract_dist(req)
abstract_dist.prep_for_dist()
if self._download_should_save:
req.archive(self.download_dir)
req.check_if_exists()
return abstract_dist
def _prepare_installed_requirement(self, req, resolver, skip_reason):
"""Prepare an already-installed requirement
"""
assert req.satisfied_by, "req should have been satisfied but isn't"
assert skip_reason is not None, (
"did not get skip reason skipped but req.satisfied_by "
"is set to %r" % (req.satisfied_by,)
)
logger.info(
'Requirement %s: %s (%s)',
skip_reason, req, req.satisfied_by.version
)
with indent_log():
if resolver.require_hashes:
logger.debug(
'Since it is already installed, we are trusting this '
'package without checking its hash. To ensure a '
'completely repeatable environment, install into an '
'empty virtualenv.'
)
abstract_dist = Installed(req)
return abstract_dist

View File

@@ -298,7 +298,7 @@ class InstallRequirement(object):
self.link = finder.find_requirement(self, upgrade)
if self._wheel_cache is not None and not require_hashes:
old_link = self.link
self.link = self._wheel_cache.cached_wheel(self.link, self.name)
self.link = self._wheel_cache.get(self.link, self.name)
if old_link != self.link:
logger.debug('Using cached wheel link: %s', self.link)

View File

@@ -1,46 +0,0 @@
"""Helpers for caches
"""
import hashlib
import os.path
def get_cache_path_for_link(cache_dir, link):
"""
Return a directory to store cached wheels in for link.
Because there are M wheels for any one sdist, we provide a directory
to cache them in, and then consult that directory when looking up
cache hits.
We only insert things into the cache if they have plausible version
numbers, so that we don't contaminate the cache with things that were not
unique. E.g. ./package might have dozens of installs done for it and build
a version of 0.0...and if we built and cached a wheel, we'd end up using
the same wheel even if the source has been edited.
:param cache_dir: The cache_dir being used by pip.
:param link: The link of the sdist for which this will cache wheels.
"""
# We want to generate an url to use as our cache key, we don't want to just
# re-use the URL because it might have other items in the fragment and we
# don't care about those.
key_parts = [link.url_without_fragment]
if link.hash_name is not None and link.hash is not None:
key_parts.append("=".join([link.hash_name, link.hash]))
key_url = "#".join(key_parts)
# Encode our key url with sha224, we'll use this because it has similar
# security properties to sha256, but with a shorter total output (and thus
# less secure). However the differences don't make a lot of difference for
# our use case here.
hashed = hashlib.sha224(key_url.encode()).hexdigest()
# We want to nest the directories some to prevent having a ton of top level
# directories where we might run out of sub directories on some FS.
parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]
# Inside of the base location for cached wheels, expand our parts and join
# them all together.
return os.path.join(cache_dir, "wheels", *parts)

View File

@@ -31,7 +31,6 @@ from pip.exceptions import (
)
from pip.locations import PIP_DELETE_MARKER_FILENAME, distutils_scheme
from pip.utils import call_subprocess, captured_stdout, ensure_dir, read_chunks
from pip.utils.cache import get_cache_path_for_link
from pip.utils.logging import indent_log
from pip.utils.setuptools_build import SETUPTOOLS_SHIM
from pip.utils.temp_dir import TempDirectory
@@ -728,7 +727,7 @@ class WheelBuilder(object):
:return: True if all the wheels built correctly.
"""
building_is_possible = self._wheel_dir or (
autobuilding and self.wheel_cache._cache_dir
autobuilding and self.wheel_cache.cache_dir
)
assert building_is_possible
@@ -779,10 +778,7 @@ class WheelBuilder(object):
python_tag = None
if autobuilding:
python_tag = pep425tags.implementation_tag
# NOTE: Should move out a method on the cache directly.
output_dir = get_cache_path_for_link(
self.wheel_cache._cache_dir, req.link
)
output_dir = self.wheel_cache.get_path_for_link(req.link)
try:
ensure_dir(output_dir)
except OSError as e:

View File

@@ -92,7 +92,7 @@ def rewrite_file_imports(item, vendored_libs):
def apply_patch(ctx, patch_file_path):
log('Applying patch %s' % patch_file_path.name)
ctx.run('git apply %s' % patch_file_path)
ctx.run('git apply --verbose %s' % patch_file_path)
def vendor(ctx, vendor_dir):
@@ -108,6 +108,8 @@ def vendor(ctx, vendor_dir):
# Cleanup setuptools unneeded parts
(vendor_dir / 'easy_install.py').unlink()
drop_dir(vendor_dir / 'setuptools')
drop_dir(vendor_dir / 'pkg_resources' / '_vendor')
drop_dir(vendor_dir / 'pkg_resources' / 'extern')
# Drop interpreter and OS specific msgpack libs.
# Pip will rely on the python-only fallback instead.

View File

@@ -1,32 +0,0 @@
diff --git a/pip/_vendor/pkg_resources/__init__.py b/pip/_vendor/pkg_resources/__init__.py
index 63351e20..f457ad79 100644
--- a/pip/_vendor/pkg_resources/__init__.py
+++ b/pip/_vendor/pkg_resources/__init__.py
@@ -67,11 +67,12 @@ try:
except ImportError:
importlib_machinery = None
-import packaging.version
-import packaging.specifiers
-import packaging.requirements
-import packaging.markers
from pip._vendor import appdirs
+from pip._vendor import packaging
+__import__('pip._vendor.packaging.version')
+__import__('pip._vendor.packaging.specifiers')
+__import__('pip._vendor.packaging.requirements')
+__import__('pip._vendor.packaging.markers')
if (3, 0) < sys.version_info < (3, 3):
raise RuntimeError("Python 3.3 or later is required")
@@ -2940,8 +2941,8 @@ def _get_mro(cls):
"""Get an mro for a type or classic class"""
if not isinstance(cls, type):
- class cls(cls, object):
+ class new_cls(cls, object):
pass
- return cls.__mro__[1:]
+ return new_cls.__mro__[1:]
return cls.__mro__

View File

@@ -1,6 +1,7 @@
import io
import os
import shutil
import subprocess
import sys
import pytest
@@ -191,6 +192,18 @@ def script(tmpdir, virtualenv):
)
@pytest.fixture(scope="session")
def common_wheels(tmpdir_factory):
"""Provide a directory with latest setuptools and wheel wheels"""
wheels_dir = tmpdir_factory.mktemp('common_wheels')
subprocess.check_call([
'pip', 'download', 'wheel', 'setuptools',
'-d', str(wheels_dir),
])
yield wheels_dir
wheels_dir.remove(ignore_errors=True)
@pytest.fixture
def data(tmpdir):
return TestData.copy(tmpdir.join("data"))

View File

@@ -146,10 +146,8 @@ def test_install_editable_from_svn(script):
result.assert_installed('version-pkg', with_files=['.svn'])
def _test_install_editable_from_git(script, tmpdir, wheel):
def _test_install_editable_from_git(script, tmpdir):
"""Test cloning from Git."""
if wheel:
script.pip('install', 'wheel')
pkg_path = _create_test_package(script, name='testpackage', vcs='git')
args = ['install', '-e', 'git+%s#egg=testpackage' % path_to_url(pkg_path)]
result = script.pip(*args, **{"expect_error": True})
@@ -157,12 +155,14 @@ def _test_install_editable_from_git(script, tmpdir, wheel):
def test_install_editable_from_git(script, tmpdir):
_test_install_editable_from_git(script, tmpdir, False)
_test_install_editable_from_git(script, tmpdir)
@pytest.mark.network
def test_install_editable_from_git_autobuild_wheel(script, tmpdir):
_test_install_editable_from_git(script, tmpdir, True)
def test_install_editable_from_git_autobuild_wheel(
script, tmpdir, common_wheels):
script.pip('install', 'wheel', '--no-index', '-f', common_wheels)
_test_install_editable_from_git(script, tmpdir)
@pytest.mark.network
@@ -936,21 +936,21 @@ def test_install_topological_sort(script, data):
@pytest.mark.network
def test_install_wheel_broken(script, data):
script.pip('install', 'wheel')
script.pip('download', 'setuptools', 'wheel', '-d', data.packages)
def test_install_wheel_broken(script, data, common_wheels):
script.pip('install', 'wheel', '--no-index', '-f', common_wheels)
res = script.pip(
'install', '--no-index', '-f', data.find_links, 'wheelbroken',
'install', '--no-index', '-f', data.find_links, '-f', common_wheels,
'wheelbroken',
expect_stderr=True)
assert "Successfully installed wheelbroken-0.1" in str(res), str(res)
@pytest.mark.network
def test_cleanup_after_failed_wheel(script, data):
script.pip('install', 'wheel')
script.pip('download', 'setuptools', 'wheel', '-d', data.packages)
def test_cleanup_after_failed_wheel(script, data, common_wheels):
script.pip('install', 'wheel', '--no-index', '-f', common_wheels)
res = script.pip(
'install', '--no-index', '-f', data.find_links, 'wheelbrokenafter',
'install', '--no-index', '-f', data.find_links, '-f', common_wheels,
'wheelbrokenafter',
expect_stderr=True)
# One of the effects of not cleaning up is broken scripts:
script_py = script.bin_path / "script.py"
@@ -962,15 +962,14 @@ def test_cleanup_after_failed_wheel(script, data):
@pytest.mark.network
def test_install_builds_wheels(script, data):
def test_install_builds_wheels(script, data, common_wheels):
# NB This incidentally tests a local tree + tarball inputs
# see test_install_editable_from_git_autobuild_wheel for editable
# vcs coverage.
script.pip('install', 'wheel')
script.pip('download', 'setuptools', 'wheel', '-d', data.packages)
script.pip('install', 'wheel', '--no-index', '-f', common_wheels)
to_install = data.packages.join('requires_wheelbroken_upper')
res = script.pip(
'install', '--no-index', '-f', data.find_links,
'install', '--no-index', '-f', data.find_links, '-f', common_wheels,
to_install, expect_stderr=True)
expected = ("Successfully installed requires-wheelbroken-upper-0"
" upper-2.0 wheelbroken-0.1")
@@ -1002,30 +1001,24 @@ def test_install_builds_wheels(script, data):
@pytest.mark.network
def test_install_no_binary_disables_building_wheels(script, data):
script.pip('install', 'wheel')
script.pip('download', 'setuptools', 'wheel', '-d', data.packages)
def test_install_no_binary_disables_building_wheels(
script, data, common_wheels):
script.pip('install', 'wheel', '--no-index', '-f', common_wheels)
to_install = data.packages.join('requires_wheelbroken_upper')
res = script.pip(
'install', '--no-index', '--no-binary=upper', '-f', data.find_links,
'-f', common_wheels,
to_install, expect_stderr=True)
expected = ("Successfully installed requires-wheelbroken-upper-0"
" upper-2.0 wheelbroken-0.1")
# Must have installed it all
assert expected in str(res), str(res)
root = appdirs.user_cache_dir('pip')
wheels = []
for top, dirs, files in os.walk(root):
wheels.extend(files)
# and built wheels for wheelbroken only
assert "Running setup.py bdist_wheel for wheelb" in str(res), str(res)
# But not requires_wheel... which is a local dir and thus uncachable.
assert "Running setup.py bdist_wheel for requir" not in str(res), str(res)
# Nor upper, which was blacklisted
assert "Running setup.py bdist_wheel for upper" not in str(res), str(res)
# wheelbroken has to run install
# into the cache
assert wheels != [], str(res)
# the local tree can't build a wheel (because we can't assume that every
# build will have a suitable unique key to cache on).
assert "Running setup.py install for requires-wheel" in str(res), str(res)
@@ -1035,12 +1028,11 @@ def test_install_no_binary_disables_building_wheels(script, data):
@pytest.mark.network
def test_install_no_binary_disables_cached_wheels(script, data):
script.pip('install', 'wheel')
script.pip('download', 'setuptools', 'wheel', '-d', data.packages)
def test_install_no_binary_disables_cached_wheels(script, data, common_wheels):
script.pip('install', 'wheel', '--no-index', '-f', common_wheels)
# Seed the cache
script.pip(
'install', '--no-index', '-f', data.find_links,
'install', '--no-index', '-f', data.find_links, '-f', common_wheels,
'upper')
script.pip('uninstall', 'upper', '-y')
res = script.pip(
@@ -1107,7 +1099,7 @@ def test_double_install_fail(script, data):
assert msg in result.stderr
def test_install_incompatible_python_requires(script):
def test_install_incompatible_python_requires(script, common_wheels):
script.scratch_path.join("pkga").mkdir()
pkga_path = script.scratch_path / 'pkga'
pkga_path.join("setup.py").write(textwrap.dedent("""
@@ -1116,13 +1108,16 @@ def test_install_incompatible_python_requires(script):
python_requires='<1.0',
version='0.1')
"""))
script.pip('install', 'setuptools>24.2') # This should not be needed
script.pip(
'install', 'setuptools>24.2', # This should not be needed
'--no-index', '-f', common_wheels,
)
result = script.pip('install', pkga_path, expect_error=True)
assert ("pkga requires Python '<1.0' "
"but the running Python is ") in result.stderr
def test_install_incompatible_python_requires_editable(script):
def test_install_incompatible_python_requires_editable(script, common_wheels):
script.scratch_path.join("pkga").mkdir()
pkga_path = script.scratch_path / 'pkga'
pkga_path.join("setup.py").write(textwrap.dedent("""
@@ -1131,7 +1126,10 @@ def test_install_incompatible_python_requires_editable(script):
python_requires='<1.0',
version='0.1')
"""))
script.pip('install', 'setuptools>24.2') # This should not be needed
script.pip(
'install', 'setuptools>24.2', # This should not be needed
'--no-index', '-f', common_wheels,
)
result = script.pip(
'install', '--editable=%s' % pkga_path, expect_error=True)
assert ("pkga requires Python '<1.0' "
@@ -1139,7 +1137,7 @@ def test_install_incompatible_python_requires_editable(script):
@pytest.mark.network
def test_install_incompatible_python_requires_wheel(script):
def test_install_incompatible_python_requires_wheel(script, common_wheels):
script.scratch_path.join("pkga").mkdir()
pkga_path = script.scratch_path / 'pkga'
pkga_path.join("setup.py").write(textwrap.dedent("""
@@ -1148,8 +1146,11 @@ def test_install_incompatible_python_requires_wheel(script):
python_requires='<1.0',
version='0.1')
"""))
script.pip('install', 'setuptools>24.2') # This should not be needed
script.pip('install', 'wheel')
script.pip(
'install', 'setuptools>24.2', # This should not be needed
'--no-index', '-f', common_wheels,
)
script.pip('install', 'wheel', '--no-index', '-f', common_wheels)
script.run(
'python', 'setup.py', 'bdist_wheel', '--universal', cwd=pkga_path)
result = script.pip('install', './pkga/dist/pkga-0.1-py2.py3-none-any.whl',
@@ -1158,7 +1159,7 @@ def test_install_incompatible_python_requires_wheel(script):
"but the running Python is ") in result.stderr
def test_install_compatible_python_requires(script):
def test_install_compatible_python_requires(script, common_wheels):
script.scratch_path.join("pkga").mkdir()
pkga_path = script.scratch_path / 'pkga'
pkga_path.join("setup.py").write(textwrap.dedent("""
@@ -1167,7 +1168,10 @@ def test_install_compatible_python_requires(script):
python_requires='>1.0',
version='0.1')
"""))
script.pip('install', 'setuptools>24.2') # This should not be needed
script.pip(
'install', 'setuptools>24.2', # This should not be needed
'--no-index', '-f', common_wheels,
)
res = script.pip('install', pkga_path, expect_error=True)
assert "Successfully installed pkga-0.1" in res.stdout, res

View File

@@ -200,8 +200,9 @@ def test_options_from_venv_config(script, virtualenv):
@pytest.mark.network
def test_install_no_binary_via_config_disables_cached_wheels(script, data):
script.pip('install', 'wheel')
def test_install_no_binary_via_config_disables_cached_wheels(
script, data, common_wheels):
script.pip('install', 'wheel', '--no-index', '-f', common_wheels)
config_file = tempfile.NamedTemporaryFile(mode='wt')
script.environ['PIP_CONFIG_FILE'] = config_file.name
config_file.write(textwrap.dedent("""\

View File

@@ -223,10 +223,10 @@ def test_install_local_with_subdirectory(script):
@pytest.mark.network
def test_wheel_user_with_prefix_in_pydistutils_cfg(script, data, virtualenv):
def test_wheel_user_with_prefix_in_pydistutils_cfg(
script, data, virtualenv, common_wheels):
# Make sure wheel is available in the virtualenv
script.pip('install', 'wheel')
script.pip('download', 'setuptools', 'wheel', '-d', data.packages)
script.pip('install', 'wheel', '--no-index', '-f', common_wheels)
virtualenv.system_site_packages = True
homedir = script.environ["HOME"]
script.scratch_path.join("bin").mkdir()
@@ -235,8 +235,10 @@ def test_wheel_user_with_prefix_in_pydistutils_cfg(script, data, virtualenv):
[install]
prefix=%s""" % script.scratch_path))
result = script.pip('install', '--user', '--no-index', '-f',
data.find_links, 'requiresupper')
result = script.pip(
'install', '--user', '--no-index',
'-f', data.find_links, '-f', common_wheels,
'requiresupper')
# Check that we are really installing a wheel
assert 'Running setup.py install for requiresupper' not in result.stdout
assert 'installed requiresupper' in result.stdout
@@ -339,7 +341,8 @@ def test_constrained_to_url_install_same_url(script, data):
@pytest.mark.network
def test_double_install_spurious_hash_mismatch(script, tmpdir, data):
def test_double_install_spurious_hash_mismatch(
script, tmpdir, data, common_wheels):
"""Make sure installing the same hashed sdist twice doesn't throw hash
mismatch errors.
@@ -349,14 +352,14 @@ def test_double_install_spurious_hash_mismatch(script, tmpdir, data):
causes spurious mismatch errors.
"""
script.pip('install', 'wheel') # Otherwise, it won't try to build wheels.
script.pip('download', 'setuptools', 'wheel', '-d', data.packages)
# Install wheel package, otherwise, it won't try to build wheels.
script.pip('install', 'wheel', '--no-index', '-f', common_wheels)
with requirements_file('simple==1.0 --hash=sha256:393043e672415891885c9a2a'
'0929b1af95fb866d6ca016b42d2e6ce53619b653',
tmpdir) as reqs_file:
# Install a package (and build its wheel):
result = script.pip_install_local(
'--find-links', data.find_links,
'--find-links', data.find_links, '-f', common_wheels,
'-r', reqs_file.abspath, expect_error=False)
assert 'Successfully installed simple-1.0' in str(result)
@@ -366,7 +369,7 @@ def test_double_install_spurious_hash_mismatch(script, tmpdir, data):
# Then install it again. We should not hit a hash mismatch, and the
# package should install happily.
result = script.pip_install_local(
'--find-links', data.find_links,
'--find-links', data.find_links, '-f', common_wheels,
'-r', reqs_file.abspath, expect_error=False)
assert 'Successfully installed simple-1.0' in str(result)
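For reference, the sha256 pin in the requirements file above can be regenerated with ``pip hash <archive>``, or by hand with the standard library; a short sketch (the archive name is illustrative):

    # Compute the value pip's --hash option expects for a given archive.
    import hashlib

    def requirement_hash(path):
        with open(path, 'rb') as f:
            digest = hashlib.sha256(f.read()).hexdigest()
        return '--hash=sha256:%s' % digest

    print(requirement_hash('simple-1.0.tar.gz'))  # illustrative filename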

View File

@@ -115,11 +115,11 @@ def test_install_from_wheel_with_headers(script, data):
@pytest.mark.network
def test_install_wheel_with_target(script, data):
def test_install_wheel_with_target(script, data, common_wheels):
"""
Test installing a wheel using pip install --target
"""
script.pip('install', 'wheel')
script.pip('install', 'wheel', '--no-index', '-f', common_wheels)
target_dir = script.scratch_path / 'target'
result = script.pip(
'install', 'simple.dist==0.1', '-t', target_dir,
@@ -131,7 +131,7 @@ def test_install_wheel_with_target(script, data):
@pytest.mark.network
def test_install_wheel_with_target_and_data_files(script, data):
def test_install_wheel_with_target_and_data_files(script, data, common_wheels):
"""
Test for issue #4092. It will be checked that a data_files specification in
setup.py is handled correctly when a wheel is installed with the --target
@@ -150,7 +150,7 @@ def test_install_wheel_with_target_and_data_files(script, data):
]
)
"""
script.pip('install', 'wheel')
script.pip('install', 'wheel', '--no-index', '-f', common_wheels)
target_dir = script.scratch_path / 'prjwithdatafile'
package = data.packages.join("prjwithdatafile-1.0-py2.py3-none-any.whl")
result = script.pip('install', package,
@@ -221,12 +221,12 @@ def test_install_from_wheel_no_deps(script, data):
@pytest.mark.network
def test_install_user_wheel(script, virtualenv, data):
def test_install_user_wheel(script, virtualenv, data, common_wheels):
"""
Test user install from wheel (that has a script)
"""
virtualenv.system_site_packages = True
script.pip('install', 'wheel')
script.pip('install', 'wheel', '--no-index', '-f', common_wheels)
result = script.pip(
'install', 'has.script==1.0', '--user', '--no-index',
'--find-links=' + data.find_links,

View File

@@ -20,34 +20,35 @@ def test_pip_wheel_fails_without_wheel(script, data):
assert "'pip wheel' requires the 'wheel' package" in result.stderr
def test_wheel_exit_status_code_when_no_requirements(script):
def test_wheel_exit_status_code_when_no_requirements(script, common_wheels):
"""
Test wheel exit status code when no requirements specified
"""
script.pip('install', 'wheel')
script.pip('install', 'wheel', '--no-index', '-f', common_wheels)
result = script.pip('wheel', expect_error=True)
assert "You must give at least one requirement to wheel" in result.stderr
assert result.returncode == ERROR
def test_wheel_exit_status_code_when_blank_requirements_file(script):
def test_wheel_exit_status_code_when_blank_requirements_file(
script, common_wheels):
"""
Test wheel exit status code when blank requirements file specified
"""
script.pip('install', 'wheel')
script.pip('install', 'wheel', '--no-index', '-f', common_wheels)
script.scratch_path.join("blank.txt").write("\n")
script.pip('wheel', '-r', 'blank.txt')
@pytest.mark.network
def test_pip_wheel_success(script, data):
def test_pip_wheel_success(script, data, common_wheels):
"""
Test 'pip wheel' success.
"""
script.pip('install', 'wheel')
script.pip('download', 'setuptools', 'wheel', '-d', data.packages)
script.pip('install', 'wheel', '--no-index', '-f', common_wheels)
result = script.pip(
'wheel', '--no-index', '-f', data.find_links, 'simple==3.0',
'wheel', '--no-index', '-f', data.find_links, '-f', common_wheels,
'simple==3.0',
)
wheel_file_name = 'simple-3.0-py%s-none-any.whl' % pyversion[0]
wheel_file_path = script.scratch / wheel_file_name
@@ -56,11 +57,11 @@ def test_pip_wheel_success(script, data):
@pytest.mark.network
def test_pip_wheel_downloads_wheels(script, data):
def test_pip_wheel_downloads_wheels(script, data, common_wheels):
"""
Test 'pip wheel' downloads wheels
"""
script.pip('install', 'wheel')
script.pip('install', 'wheel', '--no-index', '-f', common_wheels)
result = script.pip(
'wheel', '--no-index', '-f', data.find_links, 'simple.dist',
)
@@ -71,27 +72,27 @@ def test_pip_wheel_downloads_wheels(script, data):
@pytest.mark.network
def test_pip_wheel_builds_when_no_binary_set(script, data):
script.pip('install', 'wheel')
script.pip('download', 'setuptools', 'wheel', '-d', data.packages)
def test_pip_wheel_builds_when_no_binary_set(script, data, common_wheels):
script.pip('install', 'wheel', '--no-index', '-f', common_wheels)
data.packages.join('simple-3.0-py2.py3-none-any.whl').touch()
# Check that the wheel package is ignored
res = script.pip(
'wheel', '--no-index', '--no-binary', ':all:', '-f', data.find_links,
'wheel', '--no-index', '--no-binary', ':all:',
'-f', data.find_links, '-f', common_wheels,
'simple==3.0')
assert "Running setup.py bdist_wheel for simple" in str(res), str(res)
@pytest.mark.network
def test_pip_wheel_builds_editable_deps(script, data):
def test_pip_wheel_builds_editable_deps(script, data, common_wheels):
"""
Test 'pip wheel' finds and builds dependencies of editables
"""
script.pip('install', 'wheel')
script.pip('download', 'setuptools', 'wheel', '-d', data.packages)
script.pip('install', 'wheel', '--no-index', '-f', common_wheels)
editable_path = os.path.join(data.src, 'requires_simple')
result = script.pip(
'wheel', '--no-index', '-f', data.find_links, '-e', editable_path
'wheel', '--no-index', '-f', data.find_links, '-f', common_wheels,
'-e', editable_path
)
wheel_file_name = 'simple-1.0-py%s-none-any.whl' % pyversion[0]
wheel_file_path = script.scratch / wheel_file_name
@@ -99,15 +100,15 @@ def test_pip_wheel_builds_editable_deps(script, data):
@pytest.mark.network
def test_pip_wheel_builds_editable(script, data):
def test_pip_wheel_builds_editable(script, data, common_wheels):
"""
Test 'pip wheel' builds an editable package
"""
script.pip('install', 'wheel')
script.pip('download', 'setuptools', 'wheel', '-d', data.packages)
script.pip('install', 'wheel', '--no-index', '-f', common_wheels)
editable_path = os.path.join(data.src, 'simplewheel-1.0')
result = script.pip(
'wheel', '--no-index', '-f', data.find_links, '-e', editable_path
'wheel', '--no-index', '-f', data.find_links, '-f', common_wheels,
'-e', editable_path
)
wheel_file_name = 'simplewheel-1.0-py%s-none-any.whl' % pyversion[0]
wheel_file_path = script.scratch / wheel_file_name
@@ -115,14 +116,14 @@ def test_pip_wheel_builds_editable(script, data):
@pytest.mark.network
def test_pip_wheel_fail(script, data):
def test_pip_wheel_fail(script, data, common_wheels):
"""
Test 'pip wheel' failure.
"""
script.pip('install', 'wheel')
script.pip('download', 'setuptools', 'wheel', '-d', data.packages)
script.pip('install', 'wheel', '--no-index', '-f', common_wheels)
result = script.pip(
'wheel', '--no-index', '-f', data.find_links, 'wheelbroken==0.1',
'wheel', '--no-index', '-f', data.find_links, '-f', common_wheels,
'wheelbroken==0.1',
expect_error=True,
)
wheel_file_name = 'wheelbroken-0.1-py%s-none-any.whl' % pyversion[0]
@@ -137,16 +138,17 @@ def test_pip_wheel_fail(script, data):
@pytest.mark.network
def test_no_clean_option_blocks_cleaning_after_wheel(script, data):
def test_no_clean_option_blocks_cleaning_after_wheel(
script, data, common_wheels):
"""
Test --no-clean option blocks cleaning after wheel build
"""
script.pip('install', 'wheel')
script.pip('download', 'setuptools', 'wheel', '-d', data.packages)
script.pip('install', 'wheel', '--no-index', '-f', common_wheels)
build = script.venv_path / 'build'
result = script.pip(
'wheel', '--no-clean', '--no-index', '--build', build,
'--find-links=%s' % data.find_links, 'simple',
'--find-links=%s' % data.find_links, '-f', common_wheels,
'simple',
expect_temp=True,
)
build = build / 'simple'
@@ -154,16 +156,16 @@ def test_no_clean_option_blocks_cleaning_after_wheel(script, data):
@pytest.mark.network
def test_pip_wheel_source_deps(script, data):
def test_pip_wheel_source_deps(script, data, common_wheels):
"""
Test 'pip wheel' finds and builds source archive dependencies
of wheels
"""
# 'requires_source' is a wheel that depends on the 'source' project
script.pip('install', 'wheel')
script.pip('download', 'setuptools', 'wheel', '-d', data.packages)
script.pip('install', 'wheel', '--no-index', '-f', common_wheels)
result = script.pip(
'wheel', '--no-index', '-f', data.find_links, 'requires_source',
'wheel', '--no-index', '-f', data.find_links, '-f', common_wheels,
'requires_source',
)
wheel_file_name = 'source-1.0-py%s-none-any.whl' % pyversion[0]
wheel_file_path = script.scratch / wheel_file_name
@@ -172,13 +174,14 @@ def test_pip_wheel_source_deps(script, data):
@pytest.mark.network
def test_pip_wheel_fail_cause_of_previous_build_dir(script, data):
def test_pip_wheel_fail_cause_of_previous_build_dir(
script, data, common_wheels):
"""
Test when 'pip wheel' tries to install a package that has a previous build
directory
"""
script.pip('install', 'wheel')
script.pip('install', 'wheel', '--no-index', '-f', common_wheels)
# Given that I have a previous build dir of the `simple` package
build = script.venv_path / 'build' / 'simple'
@@ -198,9 +201,9 @@ def test_pip_wheel_fail_cause_of_previous_build_dir(script, data):
@pytest.mark.network
def test_wheel_package_with_latin1_setup(script, data):
def test_wheel_package_with_latin1_setup(script, data, common_wheels):
"""Create a wheel from a package with latin-1 encoded setup.py."""
script.pip('install', 'wheel')
script.pip('install', 'wheel', '--no-index', '-f', common_wheels)
pkg_to_wheel = data.packages.join("SetupPyLatin1")
result = script.pip('wheel', pkg_to_wheel)

View File

@@ -6,8 +6,8 @@ class TestWheelCache:
def test_expands_path(self):
wc = WheelCache("~/.foo/", None)
assert wc._cache_dir == expanduser("~/.foo/")
assert wc.cache_dir == expanduser("~/.foo/")
def test_falsey_path_none(self):
wc = WheelCache(False, None)
assert wc._cache_dir is None
assert wc.cache_dir is None
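The final hunk switches these unit tests from the private ``_cache_dir`` attribute to a public ``cache_dir``. One plausible shape for that rename, consistent with both assertions above though not necessarily pip's actual implementation, is a read-only property:

    # Sketch only: expose the wheel cache directory as public read-only state.
    from os.path import expanduser

    class WheelCache(object):
        def __init__(self, cache_dir, format_control):
            # A falsey cache_dir disables caching entirely.
            self._cache_dir = expanduser(cache_dir) if cache_dir else None
            self._format_control = format_control

        @property
        def cache_dir(self):
            return self._cache_dir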