Update requests to 2.19.1

Pradyun Gedam 2018-06-21 18:40:52 +05:30
parent 7b97a38c6b
commit 7ccc1a8f34
GPG Key ID: DA17C4B29CB32E4B
17 changed files with 224 additions and 66 deletions

news/requests.vendor (new file, 1 line)

@@ -0,0 +1 @@
+Update requests to 2.19.1

src/pip/_vendor/requests/__init__.py

@@ -57,10 +57,10 @@ def check_compatibility(urllib3_version, chardet_version):
     # Check urllib3 for compatibility.
     major, minor, patch = urllib3_version  # noqa: F811
     major, minor, patch = int(major), int(minor), int(patch)
-    # urllib3 >= 1.21.1, <= 1.22
+    # urllib3 >= 1.21.1, <= 1.23
     assert major == 1
     assert minor >= 21
-    assert minor <= 22
+    assert minor <= 23

     # Check chardet for compatibility.
     major, minor, patch = chardet_version.split('.')[:3]
@@ -71,6 +71,17 @@ def check_compatibility(urllib3_version, chardet_version):
     assert patch >= 2


+def _check_cryptography(cryptography_version):
+    # cryptography < 1.3.4
+    try:
+        cryptography_version = list(map(int, cryptography_version.split('.')))
+    except ValueError:
+        return
+
+    if cryptography_version < [1, 3, 4]:
+        warning = 'Old version of cryptography ({0}) may cause slowdown.'.format(cryptography_version)
+        warnings.warn(warning, RequestsDependencyWarning)
+
 # Check imported dependencies for compatibility.
 try:
     check_compatibility(urllib3.__version__, chardet.__version__)
@@ -85,6 +96,10 @@ if not WINDOWS:
     try:
         from pip._vendor.urllib3.contrib import pyopenssl
         pyopenssl.inject_into_urllib3()
+
+        # Check cryptography version
+        from cryptography import __version__ as cryptography_version
+        _check_cryptography(cryptography_version)
     except ImportError:
         pass
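The new `_check_cryptography` guard leans on Python's elementwise list comparison and silently skips version strings with non-numeric parts. A standalone sketch of the same pattern (the helper name is illustrative, not part of requests):

```python
import warnings


def warn_if_older_than(version_string, minimum):
    """Warn when a dotted version parses below `minimum`; skip unparsable input."""
    try:
        parsed = list(map(int, version_string.split('.')))
    except ValueError:
        return  # e.g. '2.1.4.dev1' has a non-numeric part, so no comparison
    if parsed < minimum:  # lists compare elementwise, left to right
        warnings.warn('version {0} is older than {1}'.format(parsed, minimum))


warn_if_older_than('1.3.3', [1, 3, 4])       # warns: [1, 3, 3] < [1, 3, 4]
warn_if_older_than('1.3.4', [1, 3, 4])       # silent
warn_if_older_than('2.1.4.dev1', [1, 3, 4])  # silent: unparsable
```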

src/pip/_vendor/requests/__version__.py

@@ -5,10 +5,10 @@

 __title__ = 'requests'
 __description__ = 'Python HTTP for Humans.'
 __url__ = 'http://python-requests.org'
-__version__ = '2.18.4'
-__build__ = 0x021804
+__version__ = '2.19.1'
+__build__ = 0x021901
 __author__ = 'Kenneth Reitz'
 __author_email__ = 'me@kennethreitz.org'
 __license__ = 'Apache 2.0'
-__copyright__ = 'Copyright 2017 Kenneth Reitz'
+__copyright__ = 'Copyright 2018 Kenneth Reitz'
 __cake__ = u'\u2728 \U0001f370 \u2728'
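The `__build__` constant mirrors the version's decimal digits as hex digits (0x021901 for 2.19.1, even though 0x19 is 25, not 19). A hypothetical helper showing that packing rule; requests itself ships no such function:

```python
def build_number(version):
    # '2.19.1' -> '02' '19' '01' -> int('021901', 16) == 0x021901
    major, minor, patch = version.split('.')
    return int('{0:0>2}{1:0>2}{2:0>2}'.format(major, minor, patch), 16)


assert build_number('2.19.1') == 0x021901
assert build_number('2.18.4') == 0x021804
```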

src/pip/_vendor/requests/adapters.py

@@ -13,6 +13,7 @@ import socket

 from pip._vendor.urllib3.poolmanager import PoolManager, proxy_from_url
 from pip._vendor.urllib3.response import HTTPResponse
+from pip._vendor.urllib3.util import parse_url
 from pip._vendor.urllib3.util import Timeout as TimeoutSauce
 from pip._vendor.urllib3.util.retry import Retry
 from pip._vendor.urllib3.exceptions import ClosedPoolError
@@ -28,13 +29,13 @@ from pip._vendor.urllib3.exceptions import ResponseError

 from .models import Response
 from .compat import urlparse, basestring
-from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers,
-                    prepend_scheme_if_needed, get_auth_from_url, urldefragauth,
-                    select_proxy)
+from .utils import (DEFAULT_CA_BUNDLE_PATH, extract_zipped_paths,
+                    get_encoding_from_headers, prepend_scheme_if_needed,
+                    get_auth_from_url, urldefragauth, select_proxy)
 from .structures import CaseInsensitiveDict
 from .cookies import extract_cookies_to_jar
 from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError,
-                         ProxyError, RetryError, InvalidSchema)
+                         ProxyError, RetryError, InvalidSchema, InvalidProxyURL)
 from .auth import _basic_auth_str

 try:
@@ -219,7 +220,7 @@ class HTTPAdapter(BaseAdapter):
                 cert_loc = verify

             if not cert_loc:
-                cert_loc = DEFAULT_CA_BUNDLE_PATH
+                cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH)

             if not cert_loc or not os.path.exists(cert_loc):
                 raise IOError("Could not find a suitable TLS CA certificate bundle, "
@@ -300,6 +301,10 @@ class HTTPAdapter(BaseAdapter):

         if proxy:
             proxy = prepend_scheme_if_needed(proxy, 'http')
+            proxy_url = parse_url(proxy)
+            if not proxy_url.host:
+                raise InvalidProxyURL("Please check proxy URL. It is malformed"
+                                      " and could be missing the host.")
             proxy_manager = self.proxy_manager_for(proxy)
             conn = proxy_manager.connection_from_url(url)
         else:
@@ -406,7 +411,7 @@ class HTTPAdapter(BaseAdapter):

         self.cert_verify(conn, request.url, verify, cert)
         url = self.request_url(request, proxies)
-        self.add_headers(request)
+        self.add_headers(request, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies)

         chunked = not (request.body is None or 'Content-Length' in request.headers)
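The new `InvalidProxyURL` check fires when urllib3's `parse_url` yields no host. A quick look at what that parser does with a degenerate proxy URL (run against a standalone urllib3 install rather than pip's vendored copy):

```python
from urllib3.util import parse_url

for proxy in ('http://proxy.example:3128', 'http://'):
    url = parse_url(proxy)
    # requests 2.19 raises InvalidProxyURL when url.host is falsy
    print(repr(proxy), '->', 'ok' if url.host else 'rejected: missing host')
```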

src/pip/_vendor/requests/api.py

@@ -20,7 +20,7 @@ def request(method, url, **kwargs):
     :param url: URL for the new :class:`Request` object.
     :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
     :param data: (optional) Dictionary or list of tuples ``[(key, value)]`` (will be form-encoded), bytes, or file-like object to send in the body of the :class:`Request`.
-    :param json: (optional) json data to send in the body of the :class:`Request`.
+    :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`.
     :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
     :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
     :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload.
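The reworded `json` parameter means: any object the standard `json` module can serialize, with the Content-Type header set automatically. A minimal usage sketch (httpbin.org is just an echo service used for illustration):

```python
import requests

# Serialized with the json module; Content-Type: application/json is set for you.
resp = requests.post('https://httpbin.org/post',
                     json={'pkg': 'requests', 'version': '2.19.1'})
print(resp.json()['json'])  # {'pkg': 'requests', 'version': '2.19.1'}
```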

src/pip/_vendor/requests/auth.py

@@ -153,6 +153,18 @@ class HTTPDigestAuth(AuthBase):
                     x = x.encode('utf-8')
                 return hashlib.sha1(x).hexdigest()
             hash_utf8 = sha_utf8
+        elif _algorithm == 'SHA-256':
+            def sha256_utf8(x):
+                if isinstance(x, str):
+                    x = x.encode('utf-8')
+                return hashlib.sha256(x).hexdigest()
+            hash_utf8 = sha256_utf8
+        elif _algorithm == 'SHA-512':
+            def sha512_utf8(x):
+                if isinstance(x, str):
+                    x = x.encode('utf-8')
+                return hashlib.sha512(x).hexdigest()
+            hash_utf8 = sha512_utf8

         KD = lambda s, d: hash_utf8("%s:%s" % (s, d))
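These new branches let `HTTPDigestAuth` answer `algorithm=SHA-256`/`SHA-512` challenges (RFC 7616). A condensed sketch of the digest arithmetic for the non-qop case, with made-up challenge values for illustration only:

```python
import hashlib


def h(s):  # mirrors sha256_utf8 above: SHA-256 hex digest of a str
    return hashlib.sha256(s.encode('utf-8')).hexdigest()


def kd(secret, data):  # RFC 7616 KD(secret, data) = H(secret ":" data)
    return h('%s:%s' % (secret, data))


# Hypothetical credentials and nonce.
user, realm, password = 'mufasa', 'http-auth@example.org', 'Circle of Life'
method, uri, nonce = 'GET', '/dir/index.html', '7ypf/xlj9XXwfDPEoM4URrv'

ha1 = h('%s:%s:%s' % (user, realm, password))
ha2 = h('%s:%s' % (method, uri))
print(kd(ha1, '%s:%s' % (nonce, ha2)))  # the `response=` digest value
```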

src/pip/_vendor/requests/compat.py

@@ -47,6 +47,7 @@ if is_py2:
     import cookielib
     from Cookie import Morsel
     from StringIO import StringIO
+    from collections import Callable, Mapping, MutableMapping

     from pip._vendor.urllib3.packages.ordered_dict import OrderedDict

@@ -64,6 +65,7 @@ elif is_py3:
     from http.cookies import Morsel
     from io import StringIO
     from collections import OrderedDict
+    from collections.abc import Callable, Mapping, MutableMapping

     builtin_str = str
     str = str
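Context for this pair of imports: the ABCs moved to `collections.abc` in Python 3.3, and importing them from plain `collections` was later removed outright (Python 3.10), so dispatching the import keeps the py3 branch future-proof. The same dispatch, sketched without requests' `is_py2`/`is_py3` flags:

```python
import sys

if sys.version_info[0] == 2:
    from collections import Callable, Mapping, MutableMapping  # noqa: F401
else:
    from collections.abc import Callable, Mapping, MutableMapping  # noqa: F401
```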

src/pip/_vendor/requests/cookies.py

@@ -12,10 +12,9 @@ requests.utils imports from here, so be careful with imports.

 import copy
 import time
 import calendar
-import collections

 from ._internal_utils import to_native_string
-from .compat import cookielib, urlparse, urlunparse, Morsel
+from .compat import cookielib, urlparse, urlunparse, Morsel, MutableMapping

 try:
     import threading
@@ -169,7 +168,7 @@ class CookieConflictError(RuntimeError):
     """

-class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
+class RequestsCookieJar(cookielib.CookieJar, MutableMapping):
     """Compatibility class; is a cookielib.CookieJar, but exposes a dict
     interface.
@@ -415,9 +414,14 @@ class RequestsCookieJar(cookielib.CookieJar, MutableMapping):
     def copy(self):
         """Return a copy of this RequestsCookieJar."""
         new_cj = RequestsCookieJar()
+        new_cj.set_policy(self.get_policy())
         new_cj.update(self)
         return new_cj

+    def get_policy(self):
+        """Return the CookiePolicy instance used."""
+        return self._policy
+

 def _copy_cookie_jar(jar):
     if jar is None:
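The practical effect of `set_policy`/`get_policy`: a copied jar now keeps a custom `CookiePolicy` instead of reverting to the default. A small check against standalone requests >= 2.19 (Python 3 module paths):

```python
from http.cookiejar import DefaultCookiePolicy

from requests.cookies import RequestsCookieJar

jar = RequestsCookieJar(policy=DefaultCookiePolicy(allowed_domains=['example.com']))
jar.set('session', 'abc123', domain='example.com', path='/')

clone = jar.copy()
# Since 2.19, copy() carries the policy across instead of resetting it.
assert clone.get_policy().allowed_domains() == ('example.com',)
assert clone['session'] == 'abc123'
```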

src/pip/_vendor/requests/exceptions.py

@@ -85,6 +85,10 @@ class InvalidHeader(RequestException, ValueError):
     """The header value provided was somehow invalid."""


+class InvalidProxyURL(InvalidURL):
+    """The proxy URL provided is invalid."""
+
+
 class ChunkedEncodingError(RequestException):
     """The server declared chunked encoding but sent an invalid chunk."""

src/pip/_vendor/requests/help.py

@@ -13,7 +13,7 @@ from pip._vendor import chardet

 from . import __version__ as requests_version

 try:
-    from .packages.urllib3.contrib import pyopenssl
+    from pip._vendor.urllib3.contrib import pyopenssl
 except ImportError:
     pyopenssl = None
     OpenSSL = None

src/pip/_vendor/requests/models.py

@@ -7,7 +7,6 @@ requests.models
 This module contains the primary objects that power Requests.
 """

-import collections
 import datetime
 import sys
@@ -37,6 +36,7 @@ from .utils import (
     stream_decode_response_unicode, to_key_val_list, parse_header_links,
     iter_slices, guess_json_utf, super_len, check_header_validity)
 from .compat import (
+    Callable, Mapping,
     cookielib, urlunparse, urlsplit, urlencode, str, bytes,
     is_py2, chardet, builtin_str, basestring)
 from .compat import json as complexjson
@@ -155,8 +155,12 @@ class RequestEncodingMixin(object):

             if isinstance(fp, (str, bytes, bytearray)):
                 fdata = fp
-            else:
+            elif hasattr(fp, 'read'):
                 fdata = fp.read()
+            elif fp is None:
+                continue
+            else:
+                fdata = fp

             rf = RequestField(name=k, data=fdata, filename=fn, headers=fh)
             rf.make_multipart(content_type=ft)
@@ -174,10 +178,10 @@ class RequestHooksMixin(object):
         if event not in self.hooks:
             raise ValueError('Unsupported event specified, with event name "%s"' % (event))

-        if isinstance(hook, collections.Callable):
+        if isinstance(hook, Callable):
            self.hooks[event].append(hook)
         elif hasattr(hook, '__iter__'):
-            self.hooks[event].extend(h for h in hook if isinstance(h, collections.Callable))
+            self.hooks[event].extend(h for h in hook if isinstance(h, Callable))

     def deregister_hook(self, event, hook):
         """Deregister a previously registered hook.
@@ -461,7 +465,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):

         is_stream = all([
             hasattr(data, '__iter__'),
-            not isinstance(data, (basestring, list, tuple, collections.Mapping))
+            not isinstance(data, (basestring, list, tuple, Mapping))
         ])

         try:
@@ -686,11 +690,11 @@ class Response(object):

     @property
     def ok(self):
-        """Returns True if :attr:`status_code` is less than 400.
+        """Returns True if :attr:`status_code` is less than 400, False if not.

         This attribute checks if the status code of the response is between
         400 and 600 to see if there was a client error or a server error. If
-        the status code, is between 200 and 400, this will return True. This
+        the status code is between 200 and 400, this will return True. This
         is **not** a check to see if the response code is ``200 OK``.
         """
         try:
@@ -820,7 +824,7 @@ class Response(object):
             if self.status_code == 0 or self.raw is None:
                 self._content = None
             else:
-                self._content = bytes().join(self.iter_content(CONTENT_CHUNK_SIZE)) or bytes()
+                self._content = b''.join(self.iter_content(CONTENT_CHUNK_SIZE)) or b''

         self._content_consumed = True
         # don't need to release the connection; that's been handled by urllib3
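The widened `_encode_files` branches mean a multipart field's payload may be a str/bytes literal, any object with `.read()`, or `None` (now skipped instead of raising AttributeError on `None.read()`). A sketch of the three shapes (httpbin URL illustrative):

```python
import io

import requests

files = {
    'as_bytes': ('a.txt', b'raw payload'),           # str/bytes used as-is
    'as_file': ('b.txt', io.BytesIO(b'stream me')),  # .read() is called
    'skipped': ('c.txt', None),                      # None fields are dropped
}
resp = requests.post('https://httpbin.org/post', files=files)
print(sorted(resp.json()['files']))  # ['as_bytes', 'as_file'] -- no 'skipped'
```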

src/pip/_vendor/requests/sessions.py

@@ -8,13 +8,12 @@ This module provides a Session object to manage and persist settings across
 requests (cookies, auth, proxies).
 """
 import os
-import platform
+import sys
 import time
-from collections import Mapping
 from datetime import timedelta

 from .auth import _basic_auth_str
-from .compat import cookielib, is_py3, OrderedDict, urljoin, urlparse
+from .compat import cookielib, is_py3, OrderedDict, urljoin, urlparse, Mapping
 from .cookies import (
     cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies)
 from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT
@@ -38,8 +37,8 @@ from .status_codes import codes

 from .models import REDIRECT_STATI

 # Preferred clock, based on which one is more accurate on a given system.
-if platform.system() == 'Windows':
-    try:  # Python 3.3+
+if sys.platform == 'win32':
+    try:  # Python 3.4+
         preferred_clock = time.perf_counter
     except AttributeError:  # Earlier than Python 3.
         preferred_clock = time.clock
@@ -123,6 +122,7 @@ class SessionRedirectMixin(object):
         hist = []  # keep track of history

         url = self.get_redirect_target(resp)
+        previous_fragment = urlparse(req.url).fragment
         while url:
             prepared_request = req.copy()
@@ -147,8 +147,12 @@ class SessionRedirectMixin(object):
                 parsed_rurl = urlparse(resp.url)
                 url = '%s:%s' % (to_native_string(parsed_rurl.scheme), url)

-            # The scheme should be lower case...
+            # Normalize url case and attach previous fragment if needed (RFC 7231 7.1.2)
             parsed = urlparse(url)
+            if parsed.fragment == '' and previous_fragment:
+                parsed = parsed._replace(fragment=previous_fragment)
+            elif parsed.fragment:
+                previous_fragment = parsed.fragment
             url = parsed.geturl()

             # Facilitate relative 'location' headers, as allowed by RFC 7231.
@@ -696,7 +700,7 @@ class Session(SessionRedirectMixin):
         """
         for (prefix, adapter) in self.adapters.items():

-            if url.lower().startswith(prefix):
+            if url.lower().startswith(prefix.lower()):
                 return adapter

         # Nothing matches :-/
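The fragment bookkeeping implements RFC 7231 section 7.1.2: a Location header without a fragment inherits the fragment of the URL that redirected. The rule in isolation (Python 3 urllib, mirroring the vendored logic):

```python
from urllib.parse import urlparse


def resolve_fragment(request_url, location):
    """A Location with no fragment inherits the requesting URL's fragment."""
    previous_fragment = urlparse(request_url).fragment
    parsed = urlparse(location)
    if parsed.fragment == '' and previous_fragment:
        parsed = parsed._replace(fragment=previous_fragment)
    return parsed.geturl()


assert resolve_fragment('http://a.example/page#s2',
                        'http://b.example/moved') == 'http://b.example/moved#s2'
assert resolve_fragment('http://a.example/page#s2',
                        'http://b.example/moved#other') == 'http://b.example/moved#other'
```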

src/pip/_vendor/requests/status_codes.py

@@ -1,5 +1,22 @@
 # -*- coding: utf-8 -*-

+"""
+The ``codes`` object defines a mapping from common names for HTTP statuses
+to their numerical codes, accessible either as attributes or as dictionary
+items.
+
+>>> requests.codes['temporary_redirect']
+307
+
+>>> requests.codes.teapot
+418
+
+>>> requests.codes['\o/']
+200
+
+Some codes have multiple names, and both upper- and lower-case versions of
+the names are allowed. For example, ``codes.ok``, ``codes.OK``, and
+``codes.okay`` all correspond to the HTTP status code 200.
+"""
+
 from .structures import LookupDict

 _codes = {
@@ -84,8 +101,20 @@ _codes = {

 codes = LookupDict(name='status_codes')

-for code, titles in _codes.items():
-    for title in titles:
-        setattr(codes, title, code)
-        if not title.startswith(('\\', '/')):
-            setattr(codes, title.upper(), code)
+def _init():
+    for code, titles in _codes.items():
+        for title in titles:
+            setattr(codes, title, code)
+            if not title.startswith(('\\', '/')):
+                setattr(codes, title.upper(), code)
+
+    def doc(code):
+        names = ', '.join('``%s``' % n for n in _codes[code])
+        return '* %d: %s' % (code, names)
+
+    global __doc__
+    __doc__ = (__doc__ + '\n' +
+               '\n'.join(doc(code) for code in sorted(_codes))
+               if __doc__ is not None else None)
+
+_init()
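Usage is unchanged by the `_init()` refactor; the module docstring just gains an auto-generated listing of every code. For example:

```python
import requests

assert requests.codes.ok == requests.codes.OK == requests.codes['okay'] == 200
assert requests.codes['temporary_redirect'] == 307
# Names starting with '\' or '/' (like '\o/') get no upper-case alias.
assert requests.codes['\\o/'] == 200
```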

src/pip/_vendor/requests/structures.py

@@ -7,16 +7,14 @@ requests.structures

 Data structures that power Requests.
 """

-import collections
-
-from .compat import OrderedDict
+from .compat import OrderedDict, Mapping, MutableMapping


-class CaseInsensitiveDict(collections.MutableMapping):
+class CaseInsensitiveDict(MutableMapping):
     """A case-insensitive ``dict``-like object.

     Implements all methods and operations of
-    ``collections.MutableMapping`` as well as dict's ``copy``. Also
+    ``MutableMapping`` as well as dict's ``copy``. Also
     provides ``lower_items``.

     All keys are expected to be strings. The structure remembers the
@@ -71,7 +69,7 @@ class CaseInsensitiveDict(MutableMapping):
         )

     def __eq__(self, other):
-        if isinstance(other, collections.Mapping):
+        if isinstance(other, Mapping):
             other = CaseInsensitiveDict(other)
         else:
             return NotImplemented
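Behavior is unchanged by the import shuffle; for reference, the semantics the `Mapping` check supports:

```python
from requests.structures import CaseInsensitiveDict

headers = CaseInsensitiveDict()
headers['Content-Type'] = 'application/json'
assert headers['content-type'] == 'application/json'  # lookups ignore case
assert list(headers) == ['Content-Type']              # original casing kept
# __eq__ checks against the Mapping ABC, so plain dicts compare fine.
assert headers == {'CONTENT-TYPE': 'application/json'}
```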

src/pip/_vendor/requests/utils.py

@@ -8,17 +8,17 @@ requests.utils
 This module provides utility functions that are used within Requests
 that are also useful for external consumption.
 """

-import cgi
 import codecs
-import collections
 import contextlib
 import io
 import os
-import platform
 import re
 import socket
 import struct
+import sys
+import tempfile
 import warnings
+import zipfile

 from .__version__ import __version__
 from . import certs
@@ -28,7 +28,7 @@ from .compat import parse_http_list as _parse_list_header
 from .compat import (
     quote, urlparse, bytes, str, OrderedDict, unquote, getproxies,
     proxy_bypass, urlunparse, basestring, integer_types, is_py3,
-    proxy_bypass_environment, getproxies_environment)
+    proxy_bypass_environment, getproxies_environment, Mapping)
 from .cookies import cookiejar_from_dict
 from .structures import CaseInsensitiveDict
 from .exceptions import (
@@ -39,19 +39,25 @@ NETRC_FILES = ('.netrc', '_netrc')

 DEFAULT_CA_BUNDLE_PATH = certs.where()

-if platform.system() == 'Windows':
+if sys.platform == 'win32':
     # provide a proxy_bypass version on Windows without DNS lookups

     def proxy_bypass_registry(host):
-        if is_py3:
-            import winreg
-        else:
-            import _winreg as winreg
+        try:
+            if is_py3:
+                import winreg
+            else:
+                import _winreg as winreg
+        except ImportError:
+            return False
+
         try:
             internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER,
                 r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
-            proxyEnable = winreg.QueryValueEx(internetSettings,
-                                              'ProxyEnable')[0]
+            # ProxyEnable could be REG_SZ or REG_DWORD, normalizing it
+            proxyEnable = int(winreg.QueryValueEx(internetSettings,
+                                                  'ProxyEnable')[0])
+            # ProxyOverride is almost always a string
             proxyOverride = winreg.QueryValueEx(internetSettings,
                                                 'ProxyOverride')[0]
         except OSError:
@@ -216,6 +222,38 @@ def guess_filename(obj):
         return os.path.basename(name)


+def extract_zipped_paths(path):
+    """Replace nonexistent paths that look like they refer to a member of a zip
+    archive with the location of an extracted copy of the target, or else
+    just return the provided path unchanged.
+    """
+    if os.path.exists(path):
+        # this is already a valid path, no need to do anything further
+        return path
+
+    # find the first valid part of the provided path and treat that as a zip archive
+    # assume the rest of the path is the name of a member in the archive
+    archive, member = os.path.split(path)
+    while archive and not os.path.exists(archive):
+        archive, prefix = os.path.split(archive)
+        member = '/'.join([prefix, member])
+
+    if not zipfile.is_zipfile(archive):
+        return path
+
+    zip_file = zipfile.ZipFile(archive)
+    if member not in zip_file.namelist():
+        return path
+
+    # we have a valid zip archive and a valid member of that archive
+    tmp = tempfile.gettempdir()
+    extracted_path = os.path.join(tmp, *member.split('/'))
+    if not os.path.exists(extracted_path):
+        extracted_path = zip_file.extract(member, path=tmp)
+
+    return extracted_path
+
+
 def from_key_val_list(value):
     """Take an object and test to see if it can be represented as a
     dictionary. Unless it can not be represented as such, return an
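`extract_zipped_paths` exists so a CA bundle that lives inside a zipped install (a .zip/pex-style deployment) can still be handed to the SSL layer as a real file; that is what the `cert_loc` change in adapters.py above uses it for. A demo with a throwaway archive (POSIX paths assumed):

```python
import os
import tempfile
import zipfile

from requests.utils import extract_zipped_paths

archive = os.path.join(tempfile.gettempdir(), 'demo-bundle.zip')
with zipfile.ZipFile(archive, 'w') as zf:
    zf.writestr('certs/cacert.pem', '# pretend PEM data\n')

virtual = os.path.join(archive, 'certs', 'cacert.pem')  # points *inside* the zip
assert not os.path.exists(virtual)

real = extract_zipped_paths(virtual)  # extracts the member to a temp dir
assert os.path.exists(real)
print(real)
```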
@@ -262,7 +300,7 @@ def to_key_val_list(value):
     if isinstance(value, (str, bytes, bool, int)):
         raise ValueError('cannot encode objects that are not 2-tuples')

-    if isinstance(value, collections.Mapping):
+    if isinstance(value, Mapping):
         value = value.items()

     return list(value)
@@ -407,6 +445,31 @@ def get_encodings_from_content(content):
             xml_re.findall(content))


+def _parse_content_type_header(header):
+    """Returns content type and parameters from given header
+
+    :param header: string
+    :return: tuple containing content type and dictionary of
+         parameters
+    """
+
+    tokens = header.split(';')
+    content_type, params = tokens[0].strip(), tokens[1:]
+    params_dict = {}
+    items_to_strip = "\"' "
+
+    for param in params:
+        param = param.strip()
+        if param:
+            key, value = param, True
+            index_of_equals = param.find("=")
+            if index_of_equals != -1:
+                key = param[:index_of_equals].strip(items_to_strip)
+                value = param[index_of_equals + 1:].strip(items_to_strip)
+            params_dict[key] = value
+    return content_type, params_dict
+
+
 def get_encoding_from_headers(headers):
     """Returns encodings from given HTTP Header Dict.
@@ -419,7 +482,7 @@ def get_encoding_from_headers(headers):

     if not content_type:
         return None

-    content_type, params = cgi.parse_header(content_type)
+    content_type, params = _parse_content_type_header(content_type)

     if 'charset' in params:
         return params['charset'].strip("'\"")
@@ -632,6 +695,8 @@ def should_bypass_proxies(url, no_proxy):
     :rtype: bool
     """
+    # Prioritize lowercase environment variables over uppercase
+    # to keep a consistent behaviour with other http projects (curl, wget).
     get_proxy = lambda k: os.environ.get(k) or os.environ.get(k.upper())

     # First check whether no_proxy is defined. If it is, check that the URL
@@ -639,28 +704,31 @@ def should_bypass_proxies(url, no_proxy):
     no_proxy_arg = no_proxy
     if no_proxy is None:
         no_proxy = get_proxy('no_proxy')
-    netloc = urlparse(url).netloc
+    parsed = urlparse(url)

     if no_proxy:
         # We need to check whether we match here. We need to see if we match
-        # the end of the netloc, both with and without the port.
+        # the end of the hostname, both with and without the port.
         no_proxy = (
             host for host in no_proxy.replace(' ', '').split(',') if host
         )

-        ip = netloc.split(':')[0]
-        if is_ipv4_address(ip):
+        if is_ipv4_address(parsed.hostname):
             for proxy_ip in no_proxy:
                 if is_valid_cidr(proxy_ip):
-                    if address_in_network(ip, proxy_ip):
+                    if address_in_network(parsed.hostname, proxy_ip):
                         return True
-                elif ip == proxy_ip:
+                elif parsed.hostname == proxy_ip:
                     # If no_proxy ip was defined in plain IP notation instead of cidr notation &
                     # matches the IP of the index
                     return True
         else:
+            host_with_port = parsed.hostname
+            if parsed.port:
+                host_with_port += ':{0}'.format(parsed.port)
+
             for host in no_proxy:
-                if netloc.endswith(host) or netloc.split(':')[0].endswith(host):
+                if parsed.hostname.endswith(host) or host_with_port.endswith(host):
                     # The URL does match something in no_proxy, so we don't want
                     # to apply the proxies on this URL.
                     return True
@@ -673,7 +741,7 @@ def should_bypass_proxies(url, no_proxy):
     # legitimate problems.
     with set_environ('no_proxy', no_proxy_arg):
         try:
-            bypass = proxy_bypass(netloc)
+            bypass = proxy_bypass(parsed.hostname)
         except (TypeError, socket.gaierror):
             bypass = False
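Switching from `netloc` to `urlparse().hostname` strips ports and credentials before matching, so `no_proxy` entries behave the same with or without a port in the URL. A rough illustration (matching cases return before the platform-dependent `proxy_bypass` consultation):

```python
from requests.utils import should_bypass_proxies

# Suffix match on the hostname, port stripped first:
assert should_bypass_proxies('http://internal.example.com:8080/x',
                             no_proxy='example.com')
# CIDR match for IPv4 literals:
assert should_bypass_proxies('http://10.0.1.7/x', no_proxy='10.0.0.0/8')
```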
@@ -743,7 +811,7 @@ def default_headers():

 def parse_header_links(value):
-    """Return a dict of parsed link headers proxies.
+    """Return a list of parsed link headers proxies.

     i.e. Link: <http:/.../front.jpeg>; rel=front; type="image/jpeg",<http://.../back.jpeg>; rel=back;type="image/jpeg"

@@ -754,6 +822,10 @@ def parse_header_links(value):
     replace_chars = ' \'"'

     value = value.strip(replace_chars)
+    if not value:
+        return links
+
     for val in re.split(', *<', value):
         try:
             url, params = val.split(';', 1)
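The corrected docstring matches reality (the function returns a list of dicts), and the new empty-value guard matters for servers that send a bare `Link:` header. For instance:

```python
from requests.utils import parse_header_links

link = '<https://api.example/page2>; rel="next", <https://api.example/page9>; rel="last"'
for entry in parse_header_links(link):
    print(entry['rel'], entry['url'])
# next https://api.example/page2
# last https://api.example/page9

assert parse_header_links('') == []  # new guard: empty header -> empty list
```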

src/pip/_vendor/vendor.txt

@@ -13,7 +13,7 @@ packaging==17.1
 pyparsing==2.2.0
 pytoml==0.1.16
 retrying==1.3.3
-requests==2.18.4
+requests==2.19.1
 chardet==3.0.4
 idna==2.7
 urllib3==1.23

tasks/vendoring/patches/requests.patch

@@ -24,13 +24,17 @@ diff --git a/src/pip/_vendor/requests/__init__.py b/src/pip/_vendor/requests/__init__.py
 index 9c3b769..36a4ef40 100644
 --- a/src/pip/_vendor/requests/__init__.py
 +++ b/src/pip/_vendor/requests/__init__.py
-@@ -80,10 +80,12 @@ except (AssertionError, ValueError):
+@@ -80,13 +80,15 @@ except (AssertionError, ValueError):
       RequestsDependencyWarning)

  # Attempt to enable urllib3's SNI support, if possible
 -try:
 -    from pip._vendor.urllib3.contrib import pyopenssl
 -    pyopenssl.inject_into_urllib3()
+-
+-    # Check cryptography version
+-    from cryptography import __version__ as cryptography_version
+-    _check_cryptography(cryptography_version)
 -except ImportError:
 -    pass
 +from pip._internal.compat import WINDOWS
@@ -38,6 +42,10 @@ index 9c3b769..36a4ef40 100644
 +    try:
 +        from pip._vendor.urllib3.contrib import pyopenssl
 +        pyopenssl.inject_into_urllib3()
++
++        # Check cryptography version
++        from cryptography import __version__ as cryptography_version
++        _check_cryptography(cryptography_version)
 +    except ImportError:
 +        pass
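Putting the two patch hunks together, the vendored `__init__.py` that pip ships ends up with the SNI-injection block guarded for non-Windows platforms. Reconstructed from the hunks above (the `WINDOWS` flag comes from `pip._internal.compat`):

```python
from pip._internal.compat import WINDOWS

# Attempt to enable urllib3's SNI support, if possible
if not WINDOWS:
    try:
        from pip._vendor.urllib3.contrib import pyopenssl
        pyopenssl.inject_into_urllib3()

        # Check cryptography version
        from cryptography import __version__ as cryptography_version
        _check_cryptography(cryptography_version)
    except ImportError:
        pass
```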