Mirror of https://github.com/pypa/pip (synced 2023-12-13 21:30:23 +01:00)

Upgrade urllib3 to 1.26.2

parent b08c4d5f3f
commit e76b1ddeaa

34 changed files with 1346 additions and 409 deletions
news/urllib3.vendor.rst (new file, 1 addition)

@@ -0,0 +1 @@
+Upgrade urllib3 to 1.26.2
@@ -1,28 +1,27 @@
 """
-urllib3 - Thread-safe connection pooling and re-using.
+Python HTTP library with thread-safe connection pooling, file post support, user friendly, and more
 """
 from __future__ import absolute_import
-import warnings

-from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, connection_from_url
+# Set default logging handler to avoid "No handler found" warnings.
+import logging
+import warnings
+from logging import NullHandler

 from . import exceptions
+from ._version import __version__
+from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, connection_from_url
 from .filepost import encode_multipart_formdata
 from .poolmanager import PoolManager, ProxyManager, proxy_from_url
 from .response import HTTPResponse
 from .util.request import make_headers
-from .util.url import get_host
-from .util.timeout import Timeout
 from .util.retry import Retry
+from .util.timeout import Timeout
+from .util.url import get_host
-# Set default logging handler to avoid "No handler found" warnings.
-import logging
-from logging import NullHandler

 __author__ = "Andrey Petrov (andrey.petrov@shazow.net)"
 __license__ = "MIT"
-__version__ = "1.25.9"
+__version__ = __version__

 __all__ = (
 "HTTPConnectionPool",
@@ -17,9 +17,10 @@ except ImportError: # Platform-specific: No threads available


 from collections import OrderedDict
-from .exceptions import InvalidHeader
-from .packages.six import iterkeys, itervalues, PY3

+from .exceptions import InvalidHeader
+from .packages import six
+from .packages.six import iterkeys, itervalues

 __all__ = ["RecentlyUsedContainer", "HTTPHeaderDict"]


@@ -174,7 +175,7 @@ class HTTPHeaderDict(MutableMapping):
 def __ne__(self, other):
 return not self.__eq__(other)

-if not PY3:  # Python 2
+if six.PY2:  # Python 2
 iterkeys = MutableMapping.iterkeys
 itervalues = MutableMapping.itervalues


@@ -190,7 +191,7 @@ class HTTPHeaderDict(MutableMapping):

 def pop(self, key, default=__marker):
 """D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
 If key is not found, d is returned if given, otherwise KeyError is raised.
 """
 # Using the MutableMapping function directly fails due to the private marker.
 # Using ordinary dict.pop would expose the internal structures.
src/pip/_vendor/urllib3/_version.py (new file, 2 additions)

@@ -0,0 +1,2 @@
+# This file is protected via CODEOWNERS
+__version__ = "1.26.2"
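For orientation (not part of the diff): after this upgrade the vendored package reports the new version through the module added above. A minimal, hypothetical check, assuming pip's vendored import path:

    # Illustrative sketch only; pip._vendor.urllib3 is the vendored copy touched by this commit.
    from pip._vendor import urllib3

    assert urllib3.__version__ == "1.26.2"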
@@ -1,14 +1,18 @@
 from __future__ import absolute_import
-import re
 import datetime
 import logging
 import os
+import re
 import socket
-from socket import error as SocketError, timeout as SocketTimeout
 import warnings
+from socket import error as SocketError
+from socket import timeout as SocketTimeout

 from .packages import six
 from .packages.six.moves.http_client import HTTPConnection as _HTTPConnection
 from .packages.six.moves.http_client import HTTPException  # noqa: F401
+from .util.proxy import create_proxy_ssl_context

 try:  # Compiled with SSL?
 import ssl
@@ -30,27 +34,33 @@ except NameError:
 pass


+try:  # Python 3:
+# Not a no-op, we're adding this to the namespace so it can be imported.
+BrokenPipeError = BrokenPipeError
+except NameError:  # Python 2:

+class BrokenPipeError(Exception):
+pass


+from ._collections import HTTPHeaderDict  # noqa (historical, removed in v2)
+from ._version import __version__
 from .exceptions import (
-NewConnectionError,
 ConnectTimeoutError,
+NewConnectionError,
 SubjectAltNameWarning,
 SystemTimeWarning,
 )
-from .packages.ssl_match_hostname import match_hostname, CertificateError
+from .packages.ssl_match_hostname import CertificateError, match_hostname
+from .util import SKIP_HEADER, SKIPPABLE_HEADERS, connection
 from .util.ssl_ import (
-resolve_cert_reqs,
-resolve_ssl_version,
 assert_fingerprint,
 create_urllib3_context,
+resolve_cert_reqs,
+resolve_ssl_version,
 ssl_wrap_socket,
 )


-from .util import connection

-from ._collections import HTTPHeaderDict


 log = logging.getLogger(__name__)

 port_by_scheme = {"http": 80, "https": 443}
@@ -62,34 +72,30 @@ RECENT_DATE = datetime.date(2019, 1, 1)
 _CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]")


-class DummyConnection(object):
-"""Used to detect a failed ConnectionCls import."""

-pass


 class HTTPConnection(_HTTPConnection, object):
 """
-Based on httplib.HTTPConnection but provides an extra constructor
+Based on :class:`http.client.HTTPConnection` but provides an extra constructor
 backwards-compatibility layer between older and newer Pythons.

 Additional keyword parameters are used to configure attributes of the connection.
 Accepted parameters include:

 - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool`
 - ``source_address``: Set the source address for the current connection.
 - ``socket_options``: Set specific options on the underlying socket. If not specified, then
 defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
 Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.

 For example, if you wish to enable TCP Keep Alive in addition to the defaults,
-you might pass::
+you might pass:

-HTTPConnection.default_socket_options + [
+.. code-block:: python
-(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
-]

-Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
+HTTPConnection.default_socket_options + [
+(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
+]

+Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
 """

 default_port = port_by_scheme["http"]
@@ -112,6 +118,10 @@ class HTTPConnection(_HTTPConnection, object):
 #: provided, we use the default options.
 self.socket_options = kw.pop("socket_options", self.default_socket_options)

+# Proxy options provided by the user.
+self.proxy = kw.pop("proxy", None)
+self.proxy_config = kw.pop("proxy_config", None)

 _HTTPConnection.__init__(self, *args, **kw)

 @property
@@ -144,7 +154,7 @@ class HTTPConnection(_HTTPConnection, object):
 self._dns_host = value

 def _new_conn(self):
-""" Establish a socket connection and set nodelay settings on it.
+"""Establish a socket connection and set nodelay settings on it.

 :return: New socket connection.
 """
@@ -174,10 +184,13 @@ class HTTPConnection(_HTTPConnection, object):

 return conn

+def _is_using_tunnel(self):
+# Google App Engine's httplib does not define _tunnel_host
+return getattr(self, "_tunnel_host", None)

 def _prepare_conn(self, conn):
 self.sock = conn
-# Google App Engine's httplib does not define _tunnel_host
+if self._is_using_tunnel():
-if getattr(self, "_tunnel_host", None):
 # TODO: Fix tunnel so it doesn't depend on self.sock state.
 self._tunnel()
 # Mark this connection as not reusable
@@ -188,7 +201,9 @@ class HTTPConnection(_HTTPConnection, object):
 self._prepare_conn(conn)

 def putrequest(self, method, url, *args, **kwargs):
-"""Send a request to the server"""
+""""""
+# Empty docstring because the indentation of CPython's implementation
+# is broken but we don't want this method in our documentation.
 match = _CONTAINS_CONTROL_CHAR_RE.search(method)
 if match:
 raise ValueError(
@@ -198,17 +213,40 @@ class HTTPConnection(_HTTPConnection, object):

 return _HTTPConnection.putrequest(self, method, url, *args, **kwargs)

+def putheader(self, header, *values):
+""""""
+if SKIP_HEADER not in values:
+_HTTPConnection.putheader(self, header, *values)
+elif six.ensure_str(header.lower()) not in SKIPPABLE_HEADERS:
+raise ValueError(
+"urllib3.util.SKIP_HEADER only supports '%s'"
+% ("', '".join(map(str.title, sorted(SKIPPABLE_HEADERS))),)
+)

+def request(self, method, url, body=None, headers=None):
+if headers is None:
+headers = {}
+else:
+# Avoid modifying the headers passed into .request()
+headers = headers.copy()
+if "user-agent" not in (six.ensure_str(k.lower()) for k in headers):
+headers["User-Agent"] = _get_default_user_agent()
+super(HTTPConnection, self).request(method, url, body=body, headers=headers)

 def request_chunked(self, method, url, body=None, headers=None):
 """
 Alternative to the common request method, which sends the
 body with chunked encoding and not as one block
 """
-headers = HTTPHeaderDict(headers if headers is not None else {})
+headers = headers or {}
-skip_accept_encoding = "accept-encoding" in headers
+header_keys = set([six.ensure_str(k.lower()) for k in headers])
-skip_host = "host" in headers
+skip_accept_encoding = "accept-encoding" in header_keys
+skip_host = "host" in header_keys
 self.putrequest(
 method, url, skip_accept_encoding=skip_accept_encoding, skip_host=skip_host
 )
+if "user-agent" not in header_keys:
+self.putheader("User-Agent", _get_default_user_agent())
 for header, value in headers.items():
 self.putheader(header, value)
 if "transfer-encoding" not in headers:
@@ -225,16 +263,22 @@ class HTTPConnection(_HTTPConnection, object):
 if not isinstance(chunk, bytes):
 chunk = chunk.encode("utf8")
 len_str = hex(len(chunk))[2:]
-self.send(len_str.encode("utf-8"))
+to_send = bytearray(len_str.encode())
-self.send(b"\r\n")
+to_send += b"\r\n"
-self.send(chunk)
+to_send += chunk
-self.send(b"\r\n")
+to_send += b"\r\n"
+self.send(to_send)

 # After the if clause, to always have a closed body
 self.send(b"0\r\n\r\n")


 class HTTPSConnection(HTTPConnection):
+"""
+Many of the parameters to this constructor are passed to the underlying SSL
+socket by means of :py:func:`urllib3.util.ssl_wrap_socket`.
+"""

 default_port = port_by_scheme["https"]

 cert_reqs = None
@@ -243,6 +287,7 @@ class HTTPSConnection(HTTPConnection):
 ca_cert_data = None
 ssl_version = None
 assert_fingerprint = None
+tls_in_tls_required = False

 def __init__(
 self,
@@ -307,10 +352,15 @@ class HTTPSConnection(HTTPConnection):
 # Add certificate verification
 conn = self._new_conn()
 hostname = self.host
+tls_in_tls = False

+if self._is_using_tunnel():
+if self.tls_in_tls_required:
+conn = self._connect_tls_proxy(hostname, conn)
+tls_in_tls = True

-# Google App Engine's httplib does not define _tunnel_host
-if getattr(self, "_tunnel_host", None):
 self.sock = conn

 # Calls self._set_hostport(), so self.host is
 # self._tunnel_host below.
 self._tunnel()
@@ -368,8 +418,26 @@ class HTTPSConnection(HTTPConnection):
 ca_cert_data=self.ca_cert_data,
 server_hostname=server_hostname,
 ssl_context=context,
+tls_in_tls=tls_in_tls,
 )

+# If we're using all defaults and the connection
+# is TLSv1 or TLSv1.1 we throw a DeprecationWarning
+# for the host.
+if (
+default_ssl_context
+and self.ssl_version is None
+and hasattr(self.sock, "version")
+and self.sock.version() in {"TLSv1", "TLSv1.1"}
+):
+warnings.warn(
+"Negotiating TLSv1/TLSv1.1 by default is deprecated "
+"and will be disabled in urllib3 v2.0.0. Connecting to "
+"'%s' with '%s' can be enabled by explicitly opting-in "
+"with 'ssl_version'" % (self.host, self.sock.version()),
+DeprecationWarning,
+)

 if self.assert_fingerprint:
 assert_fingerprint(
 self.sock.getpeercert(binary_form=True), self.assert_fingerprint
@@ -400,6 +468,40 @@ class HTTPSConnection(HTTPConnection):
 or self.assert_fingerprint is not None
 )

+def _connect_tls_proxy(self, hostname, conn):
+"""
+Establish a TLS connection to the proxy using the provided SSL context.
+"""
+proxy_config = self.proxy_config
+ssl_context = proxy_config.ssl_context
+if ssl_context:
+# If the user provided a proxy context, we assume CA and client
+# certificates have already been set
+return ssl_wrap_socket(
+sock=conn,
+server_hostname=hostname,
+ssl_context=ssl_context,
+)

+ssl_context = create_proxy_ssl_context(
+self.ssl_version,
+self.cert_reqs,
+self.ca_certs,
+self.ca_cert_dir,
+self.ca_cert_data,
+)

+# If no cert was provided, use only the default options for server
+# certificate validation
+return ssl_wrap_socket(
+sock=conn,
+ca_certs=self.ca_certs,
+ca_cert_dir=self.ca_cert_dir,
+ca_cert_data=self.ca_cert_data,
+server_hostname=hostname,
+ssl_context=ssl_context,
+)


 def _match_hostname(cert, asserted_hostname):
 try:
@@ -416,6 +518,16 @@ def _match_hostname(cert, asserted_hostname):
 raise


+def _get_default_user_agent():
+return "python-urllib3/%s" % __version__


+class DummyConnection(object):
+"""Used to detect a failed ConnectionCls import."""

+pass


 if not ssl:
 HTTPSConnection = DummyConnection  # noqa: F811

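A short usage sketch (not part of the diff) of the header behaviour the hunks above introduce: requests now carry a default "python-urllib3/<version>" User-Agent, and urllib3 1.26 lets callers suppress such headers with urllib3.util.SKIP_HEADER. The import paths below assume the standalone urllib3 package rather than pip's vendored copy, and the URL is a placeholder.

    import urllib3
    from urllib3.util import SKIP_HEADER

    http = urllib3.PoolManager()

    # Default behaviour after this upgrade: a python-urllib3/1.26.2 User-Agent is sent.
    r1 = http.request("GET", "https://example.com/")

    # Suppress the default header; only headers in SKIPPABLE_HEADERS (such as
    # User-Agent) may be skipped, otherwise putheader() raises ValueError.
    r2 = http.request("GET", "https://example.com/", headers={"User-Agent": SKIP_HEADER})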
@@ -1,57 +1,53 @@
 from __future__ import absolute_import

 import errno
 import logging
+import socket
 import sys
 import warnings
+from socket import error as SocketError
+from socket import timeout as SocketTimeout

-from socket import error as SocketError, timeout as SocketTimeout
+from .connection import (
-import socket
+BaseSSLError,
+BrokenPipeError,
+DummyConnection,
+HTTPConnection,
+HTTPException,
+HTTPSConnection,
+VerifiedHTTPSConnection,
+port_by_scheme,
+)
 from .exceptions import (
 ClosedPoolError,
-ProtocolError,
 EmptyPoolError,
 HeaderParsingError,
 HostChangedError,
+InsecureRequestWarning,
 LocationValueError,
 MaxRetryError,
+NewConnectionError,
+ProtocolError,
 ProxyError,
 ReadTimeoutError,
 SSLError,
 TimeoutError,
-InsecureRequestWarning,
-NewConnectionError,
 )
-from .packages.ssl_match_hostname import CertificateError
 from .packages import six
 from .packages.six.moves import queue
-from .connection import (
+from .packages.ssl_match_hostname import CertificateError
-port_by_scheme,
-DummyConnection,
-HTTPConnection,
-HTTPSConnection,
-VerifiedHTTPSConnection,
-HTTPException,
-BaseSSLError,
-)
 from .request import RequestMethods
 from .response import HTTPResponse

 from .util.connection import is_connection_dropped
+from .util.proxy import connection_requires_http_tunnel
+from .util.queue import LifoQueue
 from .util.request import set_file_position
 from .util.response import assert_header_parsing
 from .util.retry import Retry
 from .util.timeout import Timeout
-from .util.url import (
+from .util.url import Url, _encode_target
-get_host,
+from .util.url import _normalize_host as normalize_host
-parse_url,
+from .util.url import get_host, parse_url
-Url,
-_normalize_host as normalize_host,
-_encode_target,
-)
-from .util.queue import LifoQueue


 xrange = six.moves.xrange

@@ -111,16 +107,16 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):

 :param host:
 Host used for this HTTP Connection (e.g. "localhost"), passed into
-:class:`httplib.HTTPConnection`.
+:class:`http.client.HTTPConnection`.

 :param port:
 Port used for this HTTP Connection (None is equivalent to 80), passed
-into :class:`httplib.HTTPConnection`.
+into :class:`http.client.HTTPConnection`.

 :param strict:
 Causes BadStatusLine to be raised if the status line can't be parsed
 as a valid HTTP/1.0 or 1.1 status line, passed into
-:class:`httplib.HTTPConnection`.
+:class:`http.client.HTTPConnection`.

 .. note::
 Only works in Python 2. This parameter is ignored in Python 3.
@@ -154,11 +150,11 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):

 :param _proxy:
 Parsed proxy URL, should not be used directly, instead, see
-:class:`urllib3.connectionpool.ProxyManager`"
+:class:`urllib3.ProxyManager`

 :param _proxy_headers:
 A dictionary with proxy headers, should not be used directly,
-instead, see :class:`urllib3.connectionpool.ProxyManager`"
+instead, see :class:`urllib3.ProxyManager`

 :param \\**conn_kw:
 Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`,
@@ -181,6 +177,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
 retries=None,
 _proxy=None,
 _proxy_headers=None,
+_proxy_config=None,
 **conn_kw
 ):
 ConnectionPool.__init__(self, host, port)
@@ -202,6 +199,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):

 self.proxy = _proxy
 self.proxy_headers = _proxy_headers or {}
+self.proxy_config = _proxy_config

 # Fill the queue up so that doing get() on it will block properly
 for _ in xrange(maxsize):
@@ -218,6 +216,9 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
 # list.
 self.conn_kw.setdefault("socket_options", [])

+self.conn_kw["proxy"] = self.proxy
+self.conn_kw["proxy_config"] = self.proxy_config

 def _new_conn(self):
 """
 Return a fresh :class:`HTTPConnection`.
@@ -272,7 +273,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
 conn.close()
 if getattr(conn, "auto_open", 1) == 0:
 # This is a proxied connection that has been mutated by
-# httplib._tunnel() and cannot be reused (since it would
+# http.client._tunnel() and cannot be reused (since it would
 # attempt to bypass the proxy)
 conn = None

@@ -384,12 +385,30 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
 self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
 raise

-# conn.request() calls httplib.*.request, not the method in
+# conn.request() calls http.client.*.request, not the method in
 # urllib3.request. It also calls makefile (recv) on the socket.
-if chunked:
+try:
-conn.request_chunked(method, url, **httplib_request_kw)
+if chunked:
-else:
+conn.request_chunked(method, url, **httplib_request_kw)
-conn.request(method, url, **httplib_request_kw)
+else:
+conn.request(method, url, **httplib_request_kw)

+# We are swallowing BrokenPipeError (errno.EPIPE) since the server is
+# legitimately able to close the connection after sending a valid response.
+# With this behaviour, the received response is still readable.
+except BrokenPipeError:
+# Python 3
+pass
+except IOError as e:
+# Python 2 and macOS/Linux
+# EPIPE and ESHUTDOWN are BrokenPipeError on Python 2, and EPROTOTYPE is needed on macOS
+# https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/
+if e.errno not in {
+errno.EPIPE,
+errno.ESHUTDOWN,
+errno.EPROTOTYPE,
+}:
+raise

 # Reset the timeout for the recv() on the socket
 read_timeout = timeout_obj.read_timeout
@@ -532,10 +551,12 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
 :param method:
 HTTP request method (such as GET, POST, PUT, etc.)

+:param url:
+The URL to perform the request on.

 :param body:
-Data to send in the request body (useful for creating
+Data to send in the request body, either :class:`str`, :class:`bytes`,
-POST requests, see HTTPConnectionPool.post_url for
+an iterable of :class:`str`/:class:`bytes`, or a file-like object.
-more convenience).

 :param headers:
 Dictionary of custom headers to send, such as User-Agent,
@@ -565,7 +586,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):

 :param assert_same_host:
 If ``True``, will make sure that the host of the pool requests is
-consistent else will raise HostChangedError. When False, you can
+consistent else will raise HostChangedError. When ``False``, you can
 use the pool on an HTTP proxy and request foreign hosts.

 :param timeout:
@@ -602,6 +623,10 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
 Additional parameters are passed to
 :meth:`urllib3.response.HTTPResponse.from_httplib`
 """

+parsed_url = parse_url(url)
+destination_scheme = parsed_url.scheme

 if headers is None:
 headers = self.headers

@@ -619,7 +644,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
 if url.startswith("/"):
 url = six.ensure_str(_encode_target(url))
 else:
-url = six.ensure_str(parse_url(url).url)
+url = six.ensure_str(parsed_url.url)

 conn = None

@@ -634,10 +659,14 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
 # [1] <https://github.com/urllib3/urllib3/issues/651>
 release_this_conn = release_conn

-# Merge the proxy headers. Only do this in HTTP. We have to copy the
+http_tunnel_required = connection_requires_http_tunnel(
-# headers dict so we can safely change it without those changes being
+self.proxy, self.proxy_config, destination_scheme
-# reflected in anyone else's copy.
+)
-if self.scheme == "http":
+# Merge the proxy headers. Only done when not using HTTP CONNECT. We
+# have to copy the headers dict so we can safely change it without those
+# changes being reflected in anyone else's copy.
+if not http_tunnel_required:
 headers = headers.copy()
 headers.update(self.proxy_headers)

@@ -663,7 +692,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
 is_new_proxy_conn = self.proxy is not None and not getattr(
 conn, "sock", None
 )
-if is_new_proxy_conn:
+if is_new_proxy_conn and http_tunnel_required:
 self._prepare_proxy(conn)

 # Make the request on the httplib connection object.
@@ -698,9 +727,11 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
 # Everything went great!
 clean_exit = True

-except queue.Empty:
+except EmptyPoolError:
-# Timed out by queue.
+# Didn't get a connection from the pool, no need to clean up
-raise EmptyPoolError(self, "No pool connections are available.")
+clean_exit = True
+release_this_conn = False
+raise

 except (
 TimeoutError,
@@ -835,11 +866,7 @@ class HTTPSConnectionPool(HTTPConnectionPool):
 """
 Same as :class:`.HTTPConnectionPool`, but HTTPS.

-When Python is compiled with the :mod:`ssl` module, then
+:class:`.HTTPSConnection` uses one of ``assert_fingerprint``,
-:class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates,
-instead of :class:`.HTTPSConnection`.

-:class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``,
 ``assert_hostname`` and ``host`` in this order to verify connections.
 If ``assert_hostname`` is False, no verification is done.

@@ -923,15 +950,22 @@ class HTTPSConnectionPool(HTTPConnectionPool):

 def _prepare_proxy(self, conn):
 """
-Establish tunnel connection early, because otherwise httplib
+Establishes a tunnel connection through HTTP CONNECT.
-would improperly set Host: header to proxy's IP:port.
+Tunnel connection is established early because otherwise httplib would
+improperly set Host: header to proxy's IP:port.
 """

 conn.set_tunnel(self._proxy_host, self.port, self.proxy_headers)

+if self.proxy.scheme == "https":
+conn.tls_in_tls_required = True

 conn.connect()

 def _new_conn(self):
 """
-Return a fresh :class:`httplib.HTTPSConnection`.
+Return a fresh :class:`http.client.HTTPSConnection`.
 """
 self.num_connections += 1
 log.debug(
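A sketch (not part of the diff) of the proxy behaviour these connection-pool hunks enable: whether an HTTP CONNECT tunnel is used is now decided per request via connection_requires_http_tunnel(), and HTTPS proxies (TLS-in-TLS, where the Python version supports it) become possible. Hosts and ports below are placeholders.

    import urllib3

    # The proxy itself is contacted over TLS; for an https:// destination a
    # CONNECT tunnel is established and a second TLS session runs inside it.
    proxy = urllib3.ProxyManager("https://proxy.example:8443/")
    r1 = proxy.request("GET", "https://example.com/")

    # For a plain http:// destination no tunnel is required, so the pool merges
    # proxy_headers into the request instead (the "not http_tunnel_required" branch).
    r2 = proxy.request("GET", "http://example.com/")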
@@ -32,30 +32,26 @@ license and by oscrypto's:
 from __future__ import absolute_import

 import platform
-from ctypes.util import find_library
 from ctypes import (
-c_void_p,
+CDLL,
-c_int32,
+CFUNCTYPE,
-c_char_p,
+POINTER,
-c_size_t,
+c_bool,
 c_byte,
+c_char_p,
+c_int32,
+c_long,
+c_size_t,
 c_uint32,
 c_ulong,
-c_long,
+c_void_p,
-c_bool,
 )
-from ctypes import CDLL, POINTER, CFUNCTYPE
+from ctypes.util import find_library

+from pip._vendor.urllib3.packages.six import raise_from

-security_path = find_library("Security")
+if platform.system() != "Darwin":
-if not security_path:
+raise ImportError("Only macOS is supported")
-raise ImportError("The library Security could not be found")


-core_foundation_path = find_library("CoreFoundation")
-if not core_foundation_path:
-raise ImportError("The library CoreFoundation could not be found")


 version = platform.mac_ver()[0]
 version_info = tuple(map(int, version.split(".")))
@@ -65,8 +61,31 @@ if version_info < (10, 8):
 % (version_info[0], version_info[1])
 )

-Security = CDLL(security_path, use_errno=True)
-CoreFoundation = CDLL(core_foundation_path, use_errno=True)
+def load_cdll(name, macos10_16_path):
+"""Loads a CDLL by name, falling back to known path on 10.16+"""
+try:
+# Big Sur is technically 11 but we use 10.16 due to the Big Sur
+# beta being labeled as 10.16.
+if version_info >= (10, 16):
+path = macos10_16_path
+else:
+path = find_library(name)
+if not path:
+raise OSError  # Caught and reraised as 'ImportError'
+return CDLL(path, use_errno=True)
+except OSError:
+raise_from(ImportError("The library %s failed to load" % name), None)


+Security = load_cdll(
+"Security", "/System/Library/Frameworks/Security.framework/Security"
+)
+CoreFoundation = load_cdll(
+"CoreFoundation",
+"/System/Library/Frameworks/CoreFoundation.framework/CoreFoundation",
+)


 Boolean = c_bool
 CFIndex = c_long
@@ -276,6 +295,13 @@ try:
 Security.SSLSetProtocolVersionMax.argtypes = [SSLContextRef, SSLProtocol]
 Security.SSLSetProtocolVersionMax.restype = OSStatus

+try:
+Security.SSLSetALPNProtocols.argtypes = [SSLContextRef, CFArrayRef]
+Security.SSLSetALPNProtocols.restype = OSStatus
+except AttributeError:
+# Supported only in 10.12+
+pass

 Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p]
 Security.SecCopyErrorMessageString.restype = CFStringRef

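The load_cdll() helper added above exists because ctypes.util.find_library() can fail to locate system frameworks on macOS 11 (Big Sur), which reports itself as 10.16 in compatibility mode. A standalone sketch of the same fallback idea (the framework path is copied from the hunk; the function name and everything else is illustrative):

    from ctypes import CDLL
    from ctypes.util import find_library

    def load_security_framework():
        path = find_library("Security")  # may return None on Big Sur and newer
        if not path:
            # Fall back to the well-known framework location used in the diff.
            path = "/System/Library/Frameworks/Security.framework/Security"
        return CDLL(path, use_errno=True)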
@@ -10,13 +10,13 @@ appropriate and useful assistance to the higher-level code.
 import base64
 import ctypes
 import itertools
-import re
 import os
+import re
 import ssl
+import struct
 import tempfile

-from .bindings import Security, CoreFoundation, CFConst
+from .bindings import CFConst, CoreFoundation, Security


 # This regular expression is used to grab PEM data out of a PEM bundle.
 _PEM_CERTS_RE = re.compile(
@@ -56,6 +56,51 @@ def _cf_dictionary_from_tuples(tuples):
 )


+def _cfstr(py_bstr):
+"""
+Given a Python binary data, create a CFString.
+The string must be CFReleased by the caller.
+"""
+c_str = ctypes.c_char_p(py_bstr)
+cf_str = CoreFoundation.CFStringCreateWithCString(
+CoreFoundation.kCFAllocatorDefault,
+c_str,
+CFConst.kCFStringEncodingUTF8,
+)
+return cf_str


+def _create_cfstring_array(lst):
+"""
+Given a list of Python binary data, create an associated CFMutableArray.
+The array must be CFReleased by the caller.

+Raises an ssl.SSLError on failure.
+"""
+cf_arr = None
+try:
+cf_arr = CoreFoundation.CFArrayCreateMutable(
+CoreFoundation.kCFAllocatorDefault,
+0,
+ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
+)
+if not cf_arr:
+raise MemoryError("Unable to allocate memory!")
+for item in lst:
+cf_str = _cfstr(item)
+if not cf_str:
+raise MemoryError("Unable to allocate memory!")
+try:
+CoreFoundation.CFArrayAppendValue(cf_arr, cf_str)
+finally:
+CoreFoundation.CFRelease(cf_str)
+except BaseException as e:
+if cf_arr:
+CoreFoundation.CFRelease(cf_arr)
+raise ssl.SSLError("Unable to allocate array: %s" % (e,))
+return cf_arr


 def _cf_string_to_unicode(value):
 """
 Creates a Unicode string from a CFString object. Used entirely for error
@@ -326,3 +371,26 @@ def _load_client_cert_chain(keychain, *paths):
 finally:
 for obj in itertools.chain(identities, certificates):
 CoreFoundation.CFRelease(obj)


+TLS_PROTOCOL_VERSIONS = {
+"SSLv2": (0, 2),
+"SSLv3": (3, 0),
+"TLSv1": (3, 1),
+"TLSv1.1": (3, 2),
+"TLSv1.2": (3, 3),
+}


+def _build_tls_unknown_ca_alert(version):
+"""
+Builds a TLS alert record for an unknown CA.
+"""
+ver_maj, ver_min = TLS_PROTOCOL_VERSIONS[version]
+severity_fatal = 0x02
+description_unknown_ca = 0x30
+msg = struct.pack(">BB", severity_fatal, description_unknown_ca)
+msg_len = len(msg)
+record_type_alert = 0x15
+record = struct.pack(">BBBH", record_type_alert, ver_maj, ver_min, msg_len) + msg
+return record

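For context (not part of the diff), the record built by _build_tls_unknown_ca_alert() above is a plain TLS alert: one record-type byte 0x15 (alert), two protocol-version bytes, a two-byte length, then the two-byte alert body (0x02 fatal, 0x30 unknown_ca). A worked example for "TLSv1.2" (version bytes 3, 3):

    import struct

    msg = struct.pack(">BB", 0x02, 0x30)                       # fatal + unknown_ca
    record = struct.pack(">BBBH", 0x15, 3, 3, len(msg)) + msg  # record header + body
    assert record == b"\x15\x03\x03\x00\x02\x02\x30"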
@@ -39,24 +39,24 @@ urllib3 on Google App Engine:
 """

 from __future__ import absolute_import

 import io
 import logging
 import warnings
-from ..packages.six.moves.urllib.parse import urljoin

 from ..exceptions import (
 HTTPError,
 HTTPWarning,
 MaxRetryError,
 ProtocolError,
-TimeoutError,
 SSLError,
+TimeoutError,
 )
+from ..packages.six.moves.urllib.parse import urljoin
 from ..request import RequestMethods
 from ..response import HTTPResponse
-from ..util.timeout import Timeout
 from ..util.retry import Retry
+from ..util.timeout import Timeout
 from . import _appengine_environ

 try:
@@ -90,7 +90,7 @@ class AppEngineManager(RequestMethods):
 * If you attempt to use this on App Engine Flexible, as full socket
 support is available.
 * If a request size is more than 10 megabytes.
-* If a response size is more than 32 megabtyes.
+* If a response size is more than 32 megabytes.
 * If you use an unsupported request method such as OPTIONS.

 Beyond those cases, it will raise normal urllib3 errors.

@@ -6,12 +6,12 @@ Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10
 from __future__ import absolute_import

 from logging import getLogger

 from ntlm import ntlm

 from .. import HTTPSConnectionPool
 from ..packages.six.moves.http_client import HTTPSConnection


 log = getLogger(__name__)


@@ -1,27 +1,31 @@
 """
-SSL with SNI_-support for Python 2. Follow these instructions if you would
+TLS with SNI_-support for Python 2. Follow these instructions if you would
-like to verify SSL certificates in Python 2. Note, the default libraries do
+like to verify TLS certificates in Python 2. Note, the default libraries do
 *not* do certificate checking; you need to do additional work to validate
 certificates yourself.

 This needs the following packages installed:

-* pyOpenSSL (tested with 16.0.0)
+* `pyOpenSSL`_ (tested with 16.0.0)
-* cryptography (minimum 1.3.4, from pyopenssl)
+* `cryptography`_ (minimum 1.3.4, from pyopenssl)
-* idna (minimum 2.0, from cryptography)
+* `idna`_ (minimum 2.0, from cryptography)

 However, pyopenssl depends on cryptography, which depends on idna, so while we
 use all three directly here we end up having relatively few packages required.

 You can install them with the following command:

-pip install pyopenssl cryptography idna
+.. code-block:: bash

+$ python -m pip install pyopenssl cryptography idna

 To activate certificate checking, call
 :func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code
 before you begin making HTTP requests. This can be done in a ``sitecustomize``
 module, or at any other time before your application begins using ``urllib3``,
-like this::
+like this:

+.. code-block:: python

 try:
 import urllib3.contrib.pyopenssl
@@ -35,11 +39,11 @@ when the required modules are installed.
 Activating this module also has the positive side effect of disabling SSL/TLS
 compression in Python 2 (see `CRIME attack`_).

-If you want to configure the default list of supported cipher suites, you can
-set the ``urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST`` variable.

 .. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication
 .. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)
+.. _pyopenssl: https://www.pyopenssl.org
+.. _cryptography: https://cryptography.io
+.. _idna: https://github.com/kjd/idna
 """
 from __future__ import absolute_import

@@ -56,8 +60,9 @@ except ImportError:
 pass


-from socket import timeout, error as SocketError
 from io import BytesIO
+from socket import error as SocketError
+from socket import timeout

 try:  # Platform-specific: Python 2
 from socket import _fileobject
@@ -67,11 +72,10 @@ except ImportError:  # Platform-specific: Python 3

 import logging
 import ssl
-from ..packages import six
 import sys

 from .. import util
+from ..packages import six

 __all__ = ["inject_into_urllib3", "extract_from_urllib3"]

@@ -465,6 +469,10 @@ class PyOpenSSLContext(object):
 self._ctx.set_passwd_cb(lambda *_: password)
 self._ctx.use_privatekey_file(keyfile or certfile)

+def set_alpn_protocols(self, protocols):
+protocols = [six.ensure_binary(p) for p in protocols]
+return self._ctx.set_alpn_protos(protocols)

 def wrap_socket(
 self,
 sock,

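The docstring edits above leave the documented activation pattern unchanged; shown here for reference as a small sketch against the standalone urllib3 package (inject_into_urllib3 is a real urllib3 API):

    # Inject the pyOpenSSL TLS backend before urllib3 opens any connections.
    import urllib3.contrib.pyopenssl
    urllib3.contrib.pyopenssl.inject_into_urllib3()

    import urllib3
    http = urllib3.PoolManager()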
@ -29,6 +29,8 @@ library. An enormous debt is owed to him for blazing this trail for us. For
|
||||||
that reason, this code should be considered to be covered both by urllib3's
|
that reason, this code should be considered to be covered both by urllib3's
|
||||||
license and by oscrypto's:
|
license and by oscrypto's:
|
||||||
|
|
||||||
|
.. code-block::
|
||||||
|
|
||||||
Copyright (c) 2015-2016 Will Bond <will@wbond.net>
|
Copyright (c) 2015-2016 Will Bond <will@wbond.net>
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a
|
Permission is hereby granted, free of charge, to any person obtaining a
|
||||||
|
@ -58,16 +60,21 @@ import os.path
|
||||||
import shutil
|
import shutil
|
||||||
import socket
|
import socket
|
||||||
import ssl
|
import ssl
|
||||||
|
import struct
|
||||||
import threading
|
import threading
|
||||||
import weakref
|
import weakref
|
||||||
|
|
||||||
|
from pip._vendor import six
|
||||||
|
|
||||||
from .. import util
|
from .. import util
|
||||||
from ._securetransport.bindings import Security, SecurityConst, CoreFoundation
|
from ._securetransport.bindings import CoreFoundation, Security, SecurityConst
|
||||||
from ._securetransport.low_level import (
|
from ._securetransport.low_level import (
|
||||||
_assert_no_error,
|
_assert_no_error,
|
||||||
|
_build_tls_unknown_ca_alert,
|
||||||
_cert_array_from_pem,
|
_cert_array_from_pem,
|
||||||
_temporary_keychain,
|
_create_cfstring_array,
|
||||||
_load_client_cert_chain,
|
     _load_client_cert_chain,
     _temporary_keychain,
 )

 try:  # Platform-specific: Python 2
@@ -374,16 +381,55 @@ class WrappedSocket(object):
         )
         _assert_no_error(result)

+    def _set_alpn_protocols(self, protocols):
+        """
+        Sets up the ALPN protocols on the context.
+        """
+        if not protocols:
+            return
+        protocols_arr = _create_cfstring_array(protocols)
+        try:
+            result = Security.SSLSetALPNProtocols(self.context, protocols_arr)
+            _assert_no_error(result)
+        finally:
+            CoreFoundation.CFRelease(protocols_arr)
+
     def _custom_validate(self, verify, trust_bundle):
         """
         Called when we have set custom validation. We do this in two cases:
         first, when cert validation is entirely disabled; and second, when
         using a custom trust DB.
+        Raises an SSLError if the connection is not trusted.
         """
         # If we disabled cert validation, just say: cool.
         if not verify:
             return

+        successes = (
+            SecurityConst.kSecTrustResultUnspecified,
+            SecurityConst.kSecTrustResultProceed,
+        )
+        try:
+            trust_result = self._evaluate_trust(trust_bundle)
+            if trust_result in successes:
+                return
+            reason = "error code: %d" % (trust_result,)
+        except Exception as e:
+            # Do not trust on error
+            reason = "exception: %r" % (e,)
+
+        # SecureTransport does not send an alert nor shuts down the connection.
+        rec = _build_tls_unknown_ca_alert(self.version())
+        self.socket.sendall(rec)
+        # close the connection immediately
+        # l_onoff = 1, activate linger
+        # l_linger = 0, linger for 0 seoncds
+        opts = struct.pack("ii", 1, 0)
+        self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, opts)
+        self.close()
+        raise ssl.SSLError("certificate verify failed, %s" % reason)
+
+    def _evaluate_trust(self, trust_bundle):
         # We want data in memory, so load it up.
         if os.path.isfile(trust_bundle):
             with open(trust_bundle, "rb") as f:
@@ -421,15 +467,7 @@ class WrappedSocket(object):
             if cert_array is not None:
                 CoreFoundation.CFRelease(cert_array)

-        # Ok, now we can look at what the result was.
-        successes = (
-            SecurityConst.kSecTrustResultUnspecified,
-            SecurityConst.kSecTrustResultProceed,
-        )
-        if trust_result.value not in successes:
-            raise ssl.SSLError(
-                "certificate verify failed, error code: %d" % trust_result.value
-            )
+        return trust_result.value

     def handshake(
         self,
@@ -441,6 +479,7 @@ class WrappedSocket(object):
         client_cert,
         client_key,
         client_key_passphrase,
+        alpn_protocols,
     ):
         """
         Actually performs the TLS handshake. This is run automatically by
@@ -481,6 +520,9 @@ class WrappedSocket(object):
         # Setup the ciphers.
         self._set_ciphers()

+        # Setup the ALPN protocols.
+        self._set_alpn_protocols(alpn_protocols)
+
         # Set the minimum and maximum TLS versions.
         result = Security.SSLSetProtocolVersionMin(self.context, min_version)
         _assert_no_error(result)
@@ -754,6 +796,7 @@ class SecureTransportContext(object):
         self._client_cert = None
         self._client_key = None
         self._client_key_passphrase = None
+        self._alpn_protocols = None

     @property
     def check_hostname(self):
@@ -831,6 +874,18 @@ class SecureTransportContext(object):
         self._client_key = keyfile
         self._client_cert_passphrase = password

+    def set_alpn_protocols(self, protocols):
+        """
+        Sets the ALPN protocols that will later be set on the context.
+
+        Raises a NotImplementedError if ALPN is not supported.
+        """
+        if not hasattr(Security, "SSLSetALPNProtocols"):
+            raise NotImplementedError(
+                "SecureTransport supports ALPN only in macOS 10.12+"
+            )
+        self._alpn_protocols = [six.ensure_binary(p) for p in protocols]
+
     def wrap_socket(
         self,
         sock,
@@ -860,5 +915,6 @@ class SecureTransportContext(object):
             self._client_cert,
             self._client_key,
             self._client_key_passphrase,
+            self._alpn_protocols,
         )
         return wrapped_socket

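The hunks above add ALPN negotiation to the SecureTransport backend. A minimal sketch of how the new hook might be exercised directly on macOS; the `ssl.PROTOCOL_TLS` constant and the direct use of `SecureTransportContext` are illustrative assumptions, since urllib3 normally drives this itself once the backend is injected:

    import ssl

    from pip._vendor.urllib3.contrib.securetransport import SecureTransportContext
    from pip._vendor.urllib3.util.ssl_ import ALPN_PROTOCOLS  # ["http/1.1"]

    ctx = SecureTransportContext(ssl.PROTOCOL_TLS)
    try:
        # Stored now, applied during handshake() via _set_alpn_protocols().
        ctx.set_alpn_protocols(ALPN_PROTOCOLS)
    except NotImplementedError:
        # SSLSetALPNProtocols needs macOS 10.12+; older systems skip ALPN.
        pass
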
@@ -14,22 +14,26 @@ supports the following SOCKS features:
 - SOCKS5 with local DNS (``proxy_url='socks5://...``)
 - Usernames and passwords for the SOCKS proxy

 .. note::
    It is recommended to use ``socks5h://`` or ``socks4a://`` schemes in
    your ``proxy_url`` to ensure that DNS resolution is done from the remote
    server instead of client-side when connecting to a domain name.

 SOCKS4 supports IPv4 and domain names with the SOCKS4A extension. SOCKS5
 supports IPv4, IPv6, and domain names.

 When connecting to a SOCKS4 proxy the ``username`` portion of the ``proxy_url``
-will be sent as the ``userid`` section of the SOCKS request::
+will be sent as the ``userid`` section of the SOCKS request:
+
+.. code-block:: python

     proxy_url="socks4a://<userid>@proxy-host"

 When connecting to a SOCKS5 proxy the ``username`` and ``password`` portion
 of the ``proxy_url`` will be sent as the username/password to authenticate
-with the proxy::
+with the proxy:
+
+.. code-block:: python

     proxy_url="socks5h://<username>:<password>@proxy-host"

@@ -40,6 +44,7 @@ try:
     import socks
 except ImportError:
     import warnings
+
     from ..exceptions import DependencyWarning

     warnings.warn(
@@ -52,7 +57,8 @@ except ImportError:
     )
     raise

-from socket import error as SocketError, timeout as SocketTimeout
+from socket import error as SocketError
+from socket import timeout as SocketTimeout

 from ..connection import HTTPConnection, HTTPSConnection
 from ..connectionpool import HTTPConnectionPool, HTTPSConnectionPool

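A short usage sketch for the SOCKS module documented above; it assumes the optional PySocks dependency is installed and reuses the placeholder proxy host and credentials from the docstring:

    from pip._vendor.urllib3.contrib.socks import SOCKSProxyManager

    # "socks5h://" resolves DNS on the proxy side, as the note above recommends.
    proxy = SOCKSProxyManager("socks5h://<username>:<password>@proxy-host:1080/")
    response = proxy.request("GET", "https://example.org/")
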
@@ -1,21 +1,24 @@
 from __future__ import absolute_import

 from .packages.six.moves.http_client import IncompleteRead as httplib_IncompleteRead

 # Base Exceptions


 class HTTPError(Exception):
-    "Base exception used by this module."
+    """Base exception used by this module."""
+
     pass


 class HTTPWarning(Warning):
-    "Base warning used by this module."
+    """Base warning used by this module."""
+
     pass


 class PoolError(HTTPError):
-    "Base exception for errors caused within a pool."
+    """Base exception for errors caused within a pool."""
+
     def __init__(self, pool, message):
         self.pool = pool
@@ -27,7 +30,7 @@ class PoolError(HTTPError):


 class RequestError(PoolError):
-    "Base exception for PoolErrors that have associated URLs."
+    """Base exception for PoolErrors that have associated URLs."""

     def __init__(self, pool, url, message):
         self.url = url
@@ -39,12 +42,13 @@ class RequestError(PoolError):


 class SSLError(HTTPError):
-    "Raised when SSL certificate fails in an HTTPS connection."
+    """Raised when SSL certificate fails in an HTTPS connection."""
+
     pass


 class ProxyError(HTTPError):
-    "Raised when the connection to a proxy fails."
+    """Raised when the connection to a proxy fails."""

     def __init__(self, message, error, *args):
         super(ProxyError, self).__init__(message, error, *args)
@@ -52,12 +56,14 @@ class ProxyError(HTTPError):


 class DecodeError(HTTPError):
-    "Raised when automatic decoding based on Content-Type fails."
+    """Raised when automatic decoding based on Content-Type fails."""
+
     pass


 class ProtocolError(HTTPError):
-    "Raised when something unexpected happens mid-request/response."
+    """Raised when something unexpected happens mid-request/response."""
+
     pass

@@ -87,7 +93,7 @@ class MaxRetryError(RequestError):


 class HostChangedError(RequestError):
-    "Raised when an existing pool gets a request for a foreign host."
+    """Raised when an existing pool gets a request for a foreign host."""

     def __init__(self, pool, url, retries=3):
         message = "Tried to open a foreign host with url: %s" % url
@@ -96,13 +102,13 @@ class HostChangedError(RequestError):


 class TimeoutStateError(HTTPError):
-    """ Raised when passing an invalid state to a timeout """
+    """Raised when passing an invalid state to a timeout"""

     pass


 class TimeoutError(HTTPError):
-    """ Raised when a socket timeout error occurs.
+    """Raised when a socket timeout error occurs.

     Catching this error will catch both :exc:`ReadTimeoutErrors
     <ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`.
@@ -112,39 +118,45 @@ class TimeoutError(HTTPError):


 class ReadTimeoutError(TimeoutError, RequestError):
-    "Raised when a socket timeout occurs while receiving data from a server"
+    """Raised when a socket timeout occurs while receiving data from a server"""
+
     pass


 # This timeout error does not have a URL attached and needs to inherit from the
 # base HTTPError
 class ConnectTimeoutError(TimeoutError):
-    "Raised when a socket timeout occurs while connecting to a server"
+    """Raised when a socket timeout occurs while connecting to a server"""
+
     pass


 class NewConnectionError(ConnectTimeoutError, PoolError):
-    "Raised when we fail to establish a new connection. Usually ECONNREFUSED."
+    """Raised when we fail to establish a new connection. Usually ECONNREFUSED."""
+
     pass


 class EmptyPoolError(PoolError):
-    "Raised when a pool runs out of connections and no more are allowed."
+    """Raised when a pool runs out of connections and no more are allowed."""
+
     pass


 class ClosedPoolError(PoolError):
-    "Raised when a request enters a pool after the pool has been closed."
+    """Raised when a request enters a pool after the pool has been closed."""
+
     pass


 class LocationValueError(ValueError, HTTPError):
-    "Raised when there is something wrong with a given URL input."
+    """Raised when there is something wrong with a given URL input."""
+
     pass


 class LocationParseError(LocationValueError):
-    "Raised when get_host or similar fails to parse the URL input."
+    """Raised when get_host or similar fails to parse the URL input."""

     def __init__(self, location):
         message = "Failed to parse: %s" % location
@@ -153,39 +165,56 @@ class LocationParseError(LocationValueError):
         self.location = location


+class URLSchemeUnknown(LocationValueError):
+    """Raised when a URL input has an unsupported scheme."""
+
+    def __init__(self, scheme):
+        message = "Not supported URL scheme %s" % scheme
+        super(URLSchemeUnknown, self).__init__(message)
+
+        self.scheme = scheme
+
+
 class ResponseError(HTTPError):
-    "Used as a container for an error reason supplied in a MaxRetryError."
+    """Used as a container for an error reason supplied in a MaxRetryError."""
+
     GENERIC_ERROR = "too many error responses"
     SPECIFIC_ERROR = "too many {status_code} error responses"


 class SecurityWarning(HTTPWarning):
-    "Warned when performing security reducing actions"
+    """Warned when performing security reducing actions"""
+
     pass


 class SubjectAltNameWarning(SecurityWarning):
-    "Warned when connecting to a host with a certificate missing a SAN."
+    """Warned when connecting to a host with a certificate missing a SAN."""
+
     pass


 class InsecureRequestWarning(SecurityWarning):
-    "Warned when making an unverified HTTPS request."
+    """Warned when making an unverified HTTPS request."""
+
     pass


 class SystemTimeWarning(SecurityWarning):
-    "Warned when system time is suspected to be wrong"
+    """Warned when system time is suspected to be wrong"""
+
     pass


 class InsecurePlatformWarning(SecurityWarning):
-    "Warned when certain SSL configuration is not available on a platform."
+    """Warned when certain TLS/SSL configuration is not available on a platform."""
+
     pass


 class SNIMissingWarning(HTTPWarning):
-    "Warned when making a HTTPS request without SNI available."
+    """Warned when making a HTTPS request without SNI available."""
+
     pass

@@ -198,29 +227,16 @@ class DependencyWarning(HTTPWarning):
     pass


-class InvalidProxyConfigurationWarning(HTTPWarning):
-    """
-    Warned when using an HTTPS proxy and an HTTPS URL. Currently
-    urllib3 doesn't support HTTPS proxies and the proxy will be
-    contacted via HTTP instead. This warning can be fixed by
-    changing your HTTPS proxy URL into an HTTP proxy URL.
-
-    If you encounter this warning read this:
-    https://github.com/urllib3/urllib3/issues/1850
-    """
-
-    pass
-
-
 class ResponseNotChunked(ProtocolError, ValueError):
-    "Response needs to be chunked in order to read it as chunks."
+    """Response needs to be chunked in order to read it as chunks."""
+
     pass


 class BodyNotHttplibCompatible(HTTPError):
     """
-    Body should be httplib.HTTPResponse like (have an fp attribute which
-    returns raw chunks) for read_chunked().
+    Body should be :class:`http.client.HTTPResponse` like
+    (have an fp attribute which returns raw chunks) for read_chunked().
     """

     pass

@@ -230,9 +246,8 @@ class IncompleteRead(HTTPError, httplib_IncompleteRead):
     """
     Response length doesn't match expected Content-Length

-    Subclass of http_client.IncompleteRead to allow int value
-    for `partial` to avoid creating large objects on streamed
-    reads.
+    Subclass of :class:`http.client.IncompleteRead` to allow int value
+    for ``partial`` to avoid creating large objects on streamed reads.
     """

     def __init__(self, partial, expected):
@@ -245,13 +260,32 @@ class IncompleteRead(HTTPError, httplib_IncompleteRead):
         )


+class InvalidChunkLength(HTTPError, httplib_IncompleteRead):
+    """Invalid chunk length in a chunked response."""
+
+    def __init__(self, response, length):
+        super(InvalidChunkLength, self).__init__(
+            response.tell(), response.length_remaining
+        )
+        self.response = response
+        self.length = length
+
+    def __repr__(self):
+        return "InvalidChunkLength(got length %r, %i bytes read)" % (
+            self.length,
+            self.partial,
+        )
+
+
 class InvalidHeader(HTTPError):
-    "The header provided was somehow invalid."
+    """The header provided was somehow invalid."""
+
     pass


-class ProxySchemeUnknown(AssertionError, ValueError):
-    "ProxyManager does not support the supplied scheme"
+class ProxySchemeUnknown(AssertionError, URLSchemeUnknown):
+    """ProxyManager does not support the supplied scheme"""
+
     # TODO(t-8ch): Stop inheriting from AssertionError in v2.0.

     def __init__(self, scheme):
@@ -259,8 +293,14 @@ class ProxySchemeUnknown(AssertionError, ValueError):
         super(ProxySchemeUnknown, self).__init__(message)


+class ProxySchemeUnsupported(ValueError):
+    """Fetching HTTPS resources through HTTPS proxies is unsupported"""
+
+    pass
+
+
 class HeaderParsingError(HTTPError):
-    "Raised by assert_header_parsing, but we convert it to a log.warning statement."
+    """Raised by assert_header_parsing, but we convert it to a log.warning statement."""

     def __init__(self, defects, unparsed_data):
         message = "%s, unparsed data: %r" % (defects or "Unknown", unparsed_data)
@@ -268,5 +308,6 @@ class HeaderParsingError(HTTPError):


 class UnrewindableBodyError(HTTPError):
-    "urllib3 encountered an error when trying to rewind a body"
+    """urllib3 encountered an error when trying to rewind a body"""
+
     pass

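A small sketch of the new hierarchy introduced above: because ProxySchemeUnknown now derives from URLSchemeUnknown, one handler can cover both an unsupported destination scheme and an unsupported proxy scheme (the scheme below is deliberately bogus):

    from pip._vendor.urllib3 import ProxyManager
    from pip._vendor.urllib3.exceptions import URLSchemeUnknown

    try:
        ProxyManager("socks9://localhost:9999/")  # unsupported scheme for illustration
    except URLSchemeUnknown as exc:
        print("unsupported scheme:", exc)
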
@@ -1,4 +1,5 @@
 from __future__ import absolute_import
+
 import email.utils
 import mimetypes
 import re
@@ -26,7 +27,8 @@ def format_header_param_rfc2231(name, value):
     strategy defined in RFC 2231.

     Particularly useful for header parameters which might contain
-    non-ASCII values, like file names. This follows RFC 2388 Section 4.4.
+    non-ASCII values, like file names. This follows
+    `RFC 2388 Section 4.4 <https://tools.ietf.org/html/rfc2388#section-4.4>`_.

     :param name:
         The name of the parameter, a string expected to be ASCII only.
@@ -65,7 +67,6 @@ _HTML5_REPLACEMENTS = {
     u"\u0022": u"%22",
     # Replace "\" with "\\".
     u"\u005C": u"\u005C\u005C",
-    u"\u005C": u"\u005C\u005C",
 }

 # All control characters from 0x00 to 0x1F *except* 0x1B.

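For reference, the RFC 2231 strategy that the updated docstring links to encodes a non-ASCII parameter roughly like this (illustrative call; the expected output is shown as a comment and may vary with the installed version):

    from pip._vendor.urllib3.fields import format_header_param_rfc2231

    format_header_param_rfc2231("filename", u"r\u00e9sum\u00e9.txt")
    # -> u"filename*=utf-8''r%C3%A9sum%C3%A9.txt"
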
@@ -1,13 +1,13 @@
 from __future__ import absolute_import

 import binascii
 import codecs
 import os

 from io import BytesIO

+from .fields import RequestField
 from .packages import six
 from .packages.six import b
-from .fields import RequestField

 writer = codecs.lookup("utf-8")[3]

@@ -7,7 +7,6 @@ Backports the Python 3 ``socket.makefile`` method for use with anything that
 wants to create a "fake" socket object.
 """
 import io
-
 from socket import SocketIO

@@ -10,10 +10,13 @@ try:
 except ImportError:
     try:
         # Backport of the function from a pypi module
-        from backports.ssl_match_hostname import CertificateError, match_hostname
+        from backports.ssl_match_hostname import (  # type: ignore
+            CertificateError,
+            match_hostname,
+        )
     except ImportError:
         # Our vendored copy
-        from ._implementation import CertificateError, match_hostname
+        from ._implementation import CertificateError, match_hostname  # type: ignore

 # Not needed, but documenting what we provide.
 __all__ = ("CertificateError", "match_hostname")

@@ -1,24 +1,24 @@
 from __future__ import absolute_import

 import collections
 import functools
 import logging
-import warnings

 from ._collections import RecentlyUsedContainer
-from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool
-from .connectionpool import port_by_scheme
+from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, port_by_scheme
 from .exceptions import (
     LocationValueError,
     MaxRetryError,
     ProxySchemeUnknown,
-    InvalidProxyConfigurationWarning,
+    ProxySchemeUnsupported,
+    URLSchemeUnknown,
 )
 from .packages import six
 from .packages.six.moves.urllib.parse import urljoin
 from .request import RequestMethods
-from .util.url import parse_url
+from .util.proxy import connection_requires_http_tunnel
 from .util.retry import Retry
+from .util.url import parse_url

 __all__ = ["PoolManager", "ProxyManager", "proxy_from_url"]

@@ -59,6 +59,7 @@ _key_fields = (
     "key_headers",  # dict
     "key__proxy",  # parsed proxy url
     "key__proxy_headers",  # dict
+    "key__proxy_config",  # class
     "key_socket_options",  # list of (level (int), optname (int), value (int or str)) tuples
     "key__socks_options",  # dict
     "key_assert_hostname",  # bool or string
@@ -70,6 +71,9 @@ _key_fields = (
 #: All custom key schemes should include the fields in this key at a minimum.
 PoolKey = collections.namedtuple("PoolKey", _key_fields)

+_proxy_config_fields = ("ssl_context", "use_forwarding_for_https")
+ProxyConfig = collections.namedtuple("ProxyConfig", _proxy_config_fields)
+

 def _default_key_normalizer(key_class, request_context):
     """
@@ -161,6 +165,7 @@ class PoolManager(RequestMethods):
     """

     proxy = None
+    proxy_config = None

     def __init__(self, num_pools=10, headers=None, **connection_pool_kw):
         RequestMethods.__init__(self, headers)
@@ -182,7 +187,7 @@ class PoolManager(RequestMethods):

     def _new_pool(self, scheme, host, port, request_context=None):
         """
-        Create a new :class:`ConnectionPool` based on host, port, scheme, and
+        Create a new :class:`urllib3.connectionpool.ConnectionPool` based on host, port, scheme, and
         any additional pool keyword arguments.

         If ``request_context`` is provided, it is provided as keyword arguments
@@ -218,7 +223,7 @@ class PoolManager(RequestMethods):

     def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None):
         """
-        Get a :class:`ConnectionPool` based on the host, port, and scheme.
+        Get a :class:`urllib3.connectionpool.ConnectionPool` based on the host, port, and scheme.

         If ``port`` isn't given, it will be derived from the ``scheme`` using
         ``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is
@@ -241,20 +246,22 @@ class PoolManager(RequestMethods):

     def connection_from_context(self, request_context):
         """
-        Get a :class:`ConnectionPool` based on the request context.
+        Get a :class:`urllib3.connectionpool.ConnectionPool` based on the request context.

         ``request_context`` must at least contain the ``scheme`` key and its
         value must be a key in ``key_fn_by_scheme`` instance variable.
         """
         scheme = request_context["scheme"].lower()
-        pool_key_constructor = self.key_fn_by_scheme[scheme]
+        pool_key_constructor = self.key_fn_by_scheme.get(scheme)
+        if not pool_key_constructor:
+            raise URLSchemeUnknown(scheme)
         pool_key = pool_key_constructor(request_context)

         return self.connection_from_pool_key(pool_key, request_context=request_context)

     def connection_from_pool_key(self, pool_key, request_context=None):
         """
-        Get a :class:`ConnectionPool` based on the provided pool key.
+        Get a :class:`urllib3.connectionpool.ConnectionPool` based on the provided pool key.

         ``pool_key`` should be a namedtuple that only contains immutable
         objects. At a minimum it must have the ``scheme``, ``host``, and
@@ -312,9 +319,39 @@ class PoolManager(RequestMethods):
                 base_pool_kwargs[key] = value
         return base_pool_kwargs

+    def _proxy_requires_url_absolute_form(self, parsed_url):
+        """
+        Indicates if the proxy requires the complete destination URL in the
+        request.  Normally this is only needed when not using an HTTP CONNECT
+        tunnel.
+        """
+        if self.proxy is None:
+            return False
+
+        return not connection_requires_http_tunnel(
+            self.proxy, self.proxy_config, parsed_url.scheme
+        )
+
+    def _validate_proxy_scheme_url_selection(self, url_scheme):
+        """
+        Validates that were not attempting to do TLS in TLS connections on
+        Python2 or with unsupported SSL implementations.
+        """
+        if self.proxy is None or url_scheme != "https":
+            return
+
+        if self.proxy.scheme != "https":
+            return
+
+        if six.PY2 and not self.proxy_config.use_forwarding_for_https:
+            raise ProxySchemeUnsupported(
+                "Contacting HTTPS destinations through HTTPS proxies "
+                "'via CONNECT tunnels' is not supported in Python 2"
+            )
+
     def urlopen(self, method, url, redirect=True, **kw):
         """
-        Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen`
+        Same as :meth:`urllib3.HTTPConnectionPool.urlopen`
         with custom cross-host redirect logic and only sends the request-uri
         portion of the ``url``.

@@ -322,6 +359,8 @@ class PoolManager(RequestMethods):
         :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
         """
         u = parse_url(url)
+        self._validate_proxy_scheme_url_selection(u.scheme)
+
         conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)

         kw["assert_same_host"] = False
@@ -330,7 +369,7 @@ class PoolManager(RequestMethods):
         if "headers" not in kw:
             kw["headers"] = self.headers.copy()

-        if self.proxy is not None and u.scheme == "http":
+        if self._proxy_requires_url_absolute_form(u):
             response = conn.urlopen(method, url, **kw)
         else:
             response = conn.urlopen(method, u.request_uri, **kw)
@@ -392,6 +431,19 @@ class ProxyManager(PoolManager):
         HTTPS/CONNECT case they are sent only once. Could be used for proxy
         authentication.

+    :param proxy_ssl_context:
+        The proxy SSL context is used to establish the TLS connection to the
+        proxy when using HTTPS proxies.
+
+    :param use_forwarding_for_https:
+        (Defaults to False) If set to True will forward requests to the HTTPS
+        proxy to be made on behalf of the client instead of creating a TLS
+        tunnel via the CONNECT method. **Enabling this flag means that request
+        and response headers and content will be visible from the HTTPS proxy**
+        whereas tunneling keeps request and response headers and content
+        private.  IP address, target hostname, SNI, and port are always visible
+        to an HTTPS proxy even when this flag is disabled.
+
     Example:
         >>> proxy = urllib3.ProxyManager('http://localhost:3128/')
         >>> r1 = proxy.request('GET', 'http://google.com/')
@@ -411,6 +463,8 @@ class ProxyManager(PoolManager):
         num_pools=10,
         headers=None,
         proxy_headers=None,
+        proxy_ssl_context=None,
+        use_forwarding_for_https=False,
         **connection_pool_kw
     ):

@@ -421,18 +475,22 @@ class ProxyManager(PoolManager):
                 proxy_url.port,
             )
         proxy = parse_url(proxy_url)
-        if not proxy.port:
-            port = port_by_scheme.get(proxy.scheme, 80)
-            proxy = proxy._replace(port=port)

         if proxy.scheme not in ("http", "https"):
             raise ProxySchemeUnknown(proxy.scheme)

+        if not proxy.port:
+            port = port_by_scheme.get(proxy.scheme, 80)
+            proxy = proxy._replace(port=port)
+
         self.proxy = proxy
         self.proxy_headers = proxy_headers or {}
+        self.proxy_ssl_context = proxy_ssl_context
+        self.proxy_config = ProxyConfig(proxy_ssl_context, use_forwarding_for_https)

         connection_pool_kw["_proxy"] = self.proxy
         connection_pool_kw["_proxy_headers"] = self.proxy_headers
+        connection_pool_kw["_proxy_config"] = self.proxy_config

         super(ProxyManager, self).__init__(num_pools, headers, **connection_pool_kw)

@@ -461,27 +519,13 @@ class ProxyManager(PoolManager):
             headers_.update(headers)
         return headers_

-    def _validate_proxy_scheme_url_selection(self, url_scheme):
-        if url_scheme == "https" and self.proxy.scheme == "https":
-            warnings.warn(
-                "Your proxy configuration specified an HTTPS scheme for the proxy. "
-                "Are you sure you want to use HTTPS to contact the proxy? "
-                "This most likely indicates an error in your configuration. "
-                "Read this issue for more info: "
-                "https://github.com/urllib3/urllib3/issues/1850",
-                InvalidProxyConfigurationWarning,
-                stacklevel=3,
-            )
-
     def urlopen(self, method, url, redirect=True, **kw):
         "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute."
         u = parse_url(url)
-        self._validate_proxy_scheme_url_selection(u.scheme)
-
-        if u.scheme == "http":
-            # For proxied HTTPS requests, httplib sets the necessary headers
-            # on the CONNECT to the proxy. For HTTP, we'll definitely
-            # need to set 'Host' at the very least.
+        if not connection_requires_http_tunnel(self.proxy, self.proxy_config, u.scheme):
+            # For connections using HTTP CONNECT, httplib sets the necessary
+            # headers on the CONNECT to the proxy. If we're not using CONNECT,
+            # we'll definitely need to set 'Host' at the very least.
             headers = kw.get("headers", self.headers)
             kw["headers"] = self._set_proxy_headers(url, headers)

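A hedged sketch of the new ProxyManager options documented above; the proxy URL is a placeholder. With use_forwarding_for_https=True an HTTPS proxy forwards HTTPS requests instead of opening a CONNECT tunnel, which makes request and response headers visible to the proxy:

    from pip._vendor.urllib3 import ProxyManager

    proxy = ProxyManager(
        "https://proxy-host:3128/",      # placeholder HTTPS proxy
        use_forwarding_for_https=True,   # forward instead of CONNECT-tunneling
    )
    response = proxy.request("GET", "https://example.org/")
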
@@ -3,15 +3,14 @@ from __future__ import absolute_import
 from .filepost import encode_multipart_formdata
 from .packages.six.moves.urllib.parse import urlencode

-
 __all__ = ["RequestMethods"]


 class RequestMethods(object):
     """
     Convenience mixin for classes who implement a :meth:`urlopen` method, such
-    as :class:`~urllib3.connectionpool.HTTPConnectionPool` and
-    :class:`~urllib3.poolmanager.PoolManager`.
+    as :class:`urllib3.HTTPConnectionPool` and
+    :class:`urllib3.PoolManager`.

     Provides behavior for making common types of HTTP request methods and
     decides which type of request field encoding to use.
@@ -111,9 +110,9 @@ class RequestMethods(object):
         the body. This is useful for request methods like POST, PUT, PATCH, etc.

         When ``encode_multipart=True`` (default), then
-        :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode
+        :func:`urllib3.encode_multipart_formdata` is used to encode
         the payload with the appropriate content type. Otherwise
-        :meth:`urllib.urlencode` is used with the
+        :func:`urllib.parse.urlencode` is used with the
         'application/x-www-form-urlencoded' content type.

         Multipart encoding must be used when posting files, and it's reasonably

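A quick sketch of the two field encodings the docstring now cross-references (URLs are placeholders):

    from pip._vendor.urllib3 import PoolManager

    http = PoolManager()

    # Default: multipart/form-data via urllib3.encode_multipart_formdata
    http.request("POST", "https://example.org/upload", fields={"name": "value"})

    # application/x-www-form-urlencoded via urllib.parse.urlencode
    http.request(
        "POST",
        "https://example.org/form",
        fields={"name": "value"},
        encode_multipart=False,
    )
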
@@ -1,10 +1,11 @@
 from __future__ import absolute_import
-from contextlib import contextmanager
-import zlib
+
 import io
 import logging
-from socket import timeout as SocketTimeout
+import zlib
+from contextlib import contextmanager
 from socket import error as SocketError
+from socket import timeout as SocketTimeout

 try:
     import brotli
@@ -12,19 +13,20 @@ except ImportError:
     brotli = None

 from ._collections import HTTPHeaderDict
+from .connection import BaseSSLError, HTTPException
 from .exceptions import (
     BodyNotHttplibCompatible,
-    ProtocolError,
     DecodeError,
+    HTTPError,
+    IncompleteRead,
+    InvalidChunkLength,
+    InvalidHeader,
+    ProtocolError,
     ReadTimeoutError,
     ResponseNotChunked,
-    IncompleteRead,
-    InvalidHeader,
-    HTTPError,
+    SSLError,
 )
-from .packages.six import string_types as basestring, PY3
-from .packages.six.moves import http_client as httplib
-from .connection import HTTPException, BaseSSLError
+from .packages import six
 from .util.response import is_fp_closed, is_response_to_head

 log = logging.getLogger(__name__)
@@ -107,11 +109,10 @@ if brotli is not None:
         # are for 'brotlipy' and bottom branches for 'Brotli'
         def __init__(self):
             self._obj = brotli.Decompressor()
-
-        def decompress(self, data):
             if hasattr(self._obj, "decompress"):
-                return self._obj.decompress(data)
-            return self._obj.process(data)
+                self.decompress = self._obj.decompress
+            else:
+                self.decompress = self._obj.process

         def flush(self):
             if hasattr(self._obj, "flush"):
@@ -157,13 +158,13 @@ class HTTPResponse(io.IOBase):
     """
     HTTP Response container.

-    Backwards-compatible to httplib's HTTPResponse but the response ``body`` is
+    Backwards-compatible with :class:`http.client.HTTPResponse` but the response ``body`` is
     loaded and decoded on-demand when the ``data`` property is accessed.  This
     class is also compatible with the Python standard library's :mod:`io`
     module, and can hence be treated as a readable object in the context of that
     framework.

-    Extra parameters for behaviour not present in httplib.HTTPResponse:
+    Extra parameters for behaviour not present in :class:`http.client.HTTPResponse`:

     :param preload_content:
         If True, the response's body will be preloaded during construction.
@@ -173,7 +174,7 @@ class HTTPResponse(io.IOBase):
         'content-encoding' header.

     :param original_response:
-        When this HTTPResponse wrapper is generated from an httplib.HTTPResponse
+        When this HTTPResponse wrapper is generated from an :class:`http.client.HTTPResponse`
         object, it's convenient to include the original for debug purposes. It's
         otherwise unused.

@@ -233,7 +234,7 @@ class HTTPResponse(io.IOBase):
         self.msg = msg
         self._request_url = request_url

-        if body and isinstance(body, (basestring, bytes)):
+        if body and isinstance(body, (six.string_types, bytes)):
             self._body = body

         self._pool = pool
@@ -291,7 +292,7 @@ class HTTPResponse(io.IOBase):

     @property
     def data(self):
-        # For backwords-compat with earlier urllib3 0.4 and earlier.
+        # For backwards-compat with earlier urllib3 0.4 and earlier.
         if self._body:
             return self._body

@@ -308,8 +309,8 @@ class HTTPResponse(io.IOBase):
     def tell(self):
         """
         Obtain the number of bytes pulled over the wire so far. May differ from
-        the amount of content returned by :meth:``HTTPResponse.read`` if bytes
-        are encoded on the wire (e.g, compressed).
+        the amount of content returned by :meth:``urllib3.response.HTTPResponse.read``
+        if bytes are encoded on the wire (e.g, compressed).
         """
         return self._fp_bytes_read

@@ -443,10 +444,9 @@ class HTTPResponse(io.IOBase):

         except BaseSSLError as e:
             # FIXME: Is there a better way to differentiate between SSLErrors?
-            if "read operation timed out" not in str(e):  # Defensive:
-                # This shouldn't happen but just in case we're missing an edge
-                # case, let's avoid swallowing SSL errors.
-                raise
+            if "read operation timed out" not in str(e):
+                # SSL errors related to framing/MAC get wrapped and reraised here
+                raise SSLError(e)

             raise ReadTimeoutError(self._pool, None, "Read timed out.")

@@ -480,7 +480,7 @@ class HTTPResponse(io.IOBase):

     def read(self, amt=None, decode_content=None, cache_content=False):
         """
-        Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
+        Similar to :meth:`http.client.HTTPResponse.read`, but with two additional
         parameters: ``decode_content`` and ``cache_content``.

         :param amt:
@@ -581,7 +581,7 @@ class HTTPResponse(io.IOBase):
     @classmethod
     def from_httplib(ResponseCls, r, **response_kw):
         """
-        Given an :class:`httplib.HTTPResponse` instance ``r``, return a
+        Given an :class:`http.client.HTTPResponse` instance ``r``, return a
         corresponding :class:`urllib3.response.HTTPResponse` object.

         Remaining parameters are passed to the HTTPResponse constructor, along
@@ -590,11 +590,11 @@ class HTTPResponse(io.IOBase):
         headers = r.msg

         if not isinstance(headers, HTTPHeaderDict):
-            if PY3:
-                headers = HTTPHeaderDict(headers.items())
-            else:
+            if six.PY2:
                 # Python 2.7
                 headers = HTTPHeaderDict.from_httplib(headers)
+            else:
+                headers = HTTPHeaderDict(headers.items())

         # HTTPResponse objects in Python 3 don't have a .strict attribute
         strict = getattr(r, "strict", 0)
@@ -610,7 +610,7 @@ class HTTPResponse(io.IOBase):
         )
         return resp

-    # Backwards-compatibility methods for httplib.HTTPResponse
+    # Backwards-compatibility methods for http.client.HTTPResponse
     def getheaders(self):
         return self.headers

@@ -680,8 +680,8 @@ class HTTPResponse(io.IOBase):
     def supports_chunked_reads(self):
         """
         Checks if the underlying file-like object looks like a
-        httplib.HTTPResponse object. We do this by testing for the fp
-        attribute. If it is present we assume it returns raw chunks as
+        :class:`http.client.HTTPResponse` object. We do this by testing for
+        the fp attribute. If it is present we assume it returns raw chunks as
         processed by read_chunked().
         """
         return hasattr(self._fp, "fp")
@@ -698,7 +698,7 @@ class HTTPResponse(io.IOBase):
         except ValueError:
             # Invalid chunked protocol response, abort.
             self.close()
-            raise httplib.IncompleteRead(line)
+            raise InvalidChunkLength(self, line)

     def _handle_chunk(self, amt):
         returned_chunk = None
@@ -745,7 +745,7 @@ class HTTPResponse(io.IOBase):
             )
         if not self.supports_chunked_reads():
             raise BodyNotHttplibCompatible(
-                "Body should be httplib.HTTPResponse like. "
+                "Body should be http.client.HTTPResponse like. "
                 "It should have have an fp attribute which returns raw chunks."
             )

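The chunked-read change above swaps httplib.IncompleteRead for the new InvalidChunkLength. A tiny check of the compatibility this preserves, since existing handlers written against http.client.IncompleteRead still catch the new exception:

    from pip._vendor.urllib3.exceptions import HTTPError, InvalidChunkLength
    from pip._vendor.urllib3.packages.six.moves import http_client

    assert issubclass(InvalidChunkLength, HTTPError)
    assert issubclass(InvalidChunkLength, http_client.IncompleteRead)
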
@@ -2,23 +2,23 @@ from __future__ import absolute_import

 # For backwards compatibility, provide imports that used to be here.
 from .connection import is_connection_dropped
-from .request import make_headers
+from .request import SKIP_HEADER, SKIPPABLE_HEADERS, make_headers
 from .response import is_fp_closed
+from .retry import Retry
 from .ssl_ import (
-    SSLContext,
+    ALPN_PROTOCOLS,
     HAS_SNI,
     IS_PYOPENSSL,
     IS_SECURETRANSPORT,
+    PROTOCOL_TLS,
+    SSLContext,
     assert_fingerprint,
     resolve_cert_reqs,
     resolve_ssl_version,
     ssl_wrap_socket,
-    PROTOCOL_TLS,
 )
-from .timeout import current_time, Timeout
-from .retry import Retry
-from .url import get_host, parse_url, split_first, Url
+from .timeout import Timeout, current_time
+from .url import Url, get_host, parse_url, split_first
 from .wait import wait_for_read, wait_for_write

 __all__ = (
@@ -27,6 +27,7 @@ __all__ = (
     "IS_SECURETRANSPORT",
     "SSLContext",
     "PROTOCOL_TLS",
+    "ALPN_PROTOCOLS",
     "Retry",
     "Timeout",
     "Url",
@@ -43,4 +44,6 @@ __all__ = (
     "ssl_wrap_socket",
     "wait_for_read",
     "wait_for_write",
+    "SKIP_HEADER",
+    "SKIPPABLE_HEADERS",
 )

@@ -1,7 +1,12 @@
 from __future__ import absolute_import

 import socket
-from .wait import NoWayToWaitForSocketError, wait_for_read
+
+from pip._vendor.urllib3.exceptions import LocationParseError
+
 from ..contrib import _appengine_environ
+from ..packages import six
+from .wait import NoWayToWaitForSocketError, wait_for_read


 def is_connection_dropped(conn):  # Platform-specific
@@ -9,7 +14,7 @@ def is_connection_dropped(conn):  # Platform-specific
     Returns True if the connection is dropped and should be closed.

     :param conn:
-        :class:`httplib.HTTPConnection` object.
+        :class:`http.client.HTTPConnection` object.

     Note: For platforms like AppEngine, this will always return ``False`` to
     let the platform handle connection recycling transparently for us.
@@ -42,7 +47,7 @@ def create_connection(
     port)``) and return the socket object. Passing the optional
     *timeout* parameter will set the timeout on the socket instance
     before attempting to connect. If no *timeout* is supplied, the
-    global default timeout setting returned by :func:`getdefaulttimeout`
+    global default timeout setting returned by :func:`socket.getdefaulttimeout`
     is used. If *source_address* is set it must be a tuple of (host, port)
     for the socket to bind as a source address before making the connection.
     An host of '' or port 0 tells the OS to use the default.
@@ -58,6 +63,13 @@ def create_connection(
     # The original create_connection function always returns all records.
     family = allowed_gai_family()

+    try:
+        host.encode("idna")
+    except UnicodeError:
+        return six.raise_from(
+            LocationParseError(u"'%s', label empty or too long" % host), None
+        )
+
     for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):
         af, socktype, proto, canonname, sa = res
         sock = None

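A minimal sketch of the new host validation added above: a DNS label longer than 63 characters cannot be IDNA-encoded, so create_connection() now fails fast with LocationParseError instead of an opaque UnicodeError from the socket layer (the hostname below is deliberately invalid):

    from pip._vendor.urllib3.exceptions import LocationParseError
    from pip._vendor.urllib3.util.connection import create_connection

    try:
        create_connection(("a" * 64 + ".invalid", 80), timeout=1)
    except LocationParseError as exc:
        print(exc)  # Failed to parse: '<64-char label>.invalid', label empty or too long
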
56
src/pip/_vendor/urllib3/util/proxy.py
Normal file
56
src/pip/_vendor/urllib3/util/proxy.py
Normal file
|
@@ -0,0 +1,56 @@
+from .ssl_ import create_urllib3_context, resolve_cert_reqs, resolve_ssl_version
+
+
+def connection_requires_http_tunnel(
+    proxy_url=None, proxy_config=None, destination_scheme=None
+):
+    """
+    Returns True if the connection requires an HTTP CONNECT through the proxy.
+
+    :param URL proxy_url:
+        URL of the proxy.
+    :param ProxyConfig proxy_config:
+        Proxy configuration from poolmanager.py
+    :param str destination_scheme:
+        The scheme of the destination. (i.e https, http, etc)
+    """
+    # If we're not using a proxy, no way to use a tunnel.
+    if proxy_url is None:
+        return False
+
+    # HTTP destinations never require tunneling, we always forward.
+    if destination_scheme == "http":
+        return False
+
+    # Support for forwarding with HTTPS proxies and HTTPS destinations.
+    if (
+        proxy_url.scheme == "https"
+        and proxy_config
+        and proxy_config.use_forwarding_for_https
+    ):
+        return False
+
+    # Otherwise always use a tunnel.
+    return True
+
+
+def create_proxy_ssl_context(
+    ssl_version, cert_reqs, ca_certs=None, ca_cert_dir=None, ca_cert_data=None
+):
+    """
+    Generates a default proxy ssl context if one hasn't been provided by the
+    user.
+    """
+    ssl_context = create_urllib3_context(
+        ssl_version=resolve_ssl_version(ssl_version),
+        cert_reqs=resolve_cert_reqs(cert_reqs),
+    )
+    if (
+        not ca_certs
+        and not ca_cert_dir
+        and not ca_cert_data
+        and hasattr(ssl_context, "load_default_certs")
+    ):
+        ssl_context.load_default_certs()
+
+    return ssl_context
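The new helper centralises the tunnel-vs-forward decision used by the connection pools. A quick sketch of its behaviour (``parse_url`` comes from ``urllib3.util.url``; the proxy URL is hypothetical):

    from urllib3.util.proxy import connection_requires_http_tunnel
    from urllib3.util.url import parse_url

    proxy = parse_url("https://proxy.internal:8443")
    print(connection_requires_http_tunnel(proxy, None, "http"))   # False - plain HTTP is forwarded
    print(connection_requires_http_tunnel(proxy, None, "https"))  # True  - HTTPS gets a CONNECT tunnel

Forwarding HTTPS requests through an HTTPS proxy (the third branch above) is only taken when the proxy configuration opts in; in urllib3 1.26 that is the ``use_forwarding_for_https`` flag that ``ProxyManager`` accepts and passes down through its proxy config.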
src/pip/_vendor/urllib3/util/queue.py
@@ -1,4 +1,5 @@
 import collections
+
 from ..packages import six
 from ..packages.six.moves import queue

src/pip/_vendor/urllib3/util/request.py
@@ -1,8 +1,16 @@
 from __future__ import absolute_import
+
 from base64 import b64encode

-from ..packages.six import b, integer_types
 from ..exceptions import UnrewindableBodyError
+from ..packages.six import b, integer_types
+
+# Pass as a value within ``headers`` to skip
+# emitting some HTTP headers that are added automatically.
+# The only headers that are supported are ``Accept-Encoding``,
+# ``Host``, and ``User-Agent``.
+SKIP_HEADER = "@@@SKIP_HEADER@@@"
+SKIPPABLE_HEADERS = frozenset(["accept-encoding", "host", "user-agent"])

 ACCEPT_ENCODING = "gzip,deflate"
 try:
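``SKIP_HEADER`` is a sentinel value, not a real header: supplying it for one of the three skippable headers tells the connection layer to omit that header entirely rather than send an empty value. A hedged usage sketch (the URL is a placeholder; only ``Accept-Encoding``, ``Host`` and ``User-Agent`` honour the sentinel):

    import urllib3
    from urllib3.util.request import SKIP_HEADER

    http = urllib3.PoolManager()
    # The request should go out with no User-Agent header at all.
    resp = http.request(
        "GET", "https://example.com/", headers={"User-Agent": SKIP_HEADER}
    )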
src/pip/_vendor/urllib3/util/response.py
@@ -1,7 +1,9 @@
 from __future__ import absolute_import
-from ..packages.six.moves import http_client as httplib
+
+from email.errors import MultipartInvariantViolationDefect, StartBoundaryNotFoundDefect

 from ..exceptions import HeaderParsingError
+from ..packages.six.moves import http_client as httplib


 def is_fp_closed(obj):
@@ -42,8 +44,7 @@ def assert_header_parsing(headers):

     Only works on Python 3.

-    :param headers: Headers to verify.
-    :type headers: `httplib.HTTPMessage`.
+    :param http.client.HTTPMessage headers: Headers to verify.

     :raises urllib3.exceptions.HeaderParsingError:
         If parsing errors are found.

@@ -66,6 +67,25 @@ def assert_header_parsing(headers):

     if isinstance(payload, (bytes, str)):
         unparsed_data = payload
+    if defects:
+        # httplib is assuming a response body is available
+        # when parsing headers even when httplib only sends
+        # header data to parse_headers() This results in
+        # defects on multipart responses in particular.
+        # See: https://github.com/urllib3/urllib3/issues/800
+
+        # So we ignore the following defects:
+        # - StartBoundaryNotFoundDefect:
+        #     The claimed start boundary was never found.
+        # - MultipartInvariantViolationDefect:
+        #     A message claimed to be a multipart but no subparts were found.
+        defects = [
+            defect
+            for defect in defects
+            if not isinstance(
+                defect, (StartBoundaryNotFoundDefect, MultipartInvariantViolationDefect)
+            )
+        ]

     if defects or unparsed_data:
         raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data)

@@ -76,8 +96,9 @@ def is_response_to_head(response):
     Checks whether the request of a response has been a HEAD-request.
     Handles the quirks of AppEngine.

-    :param conn:
-    :type conn: :class:`httplib.HTTPResponse`
+    :param http.client.HTTPResponse response:
+        Response to check if the originating request
+        used 'HEAD' as a method.
     """
     # FIXME: Can we do this somehow without accessing private httplib _method?
     method = response._method
src/pip/_vendor/urllib3/util/retry.py
@@ -1,23 +1,24 @@
 from __future__ import absolute_import
-import time
+
+import email
 import logging
+import re
+import time
+import warnings
 from collections import namedtuple
 from itertools import takewhile
-import email
-import re

 from ..exceptions import (
     ConnectTimeoutError,
+    InvalidHeader,
     MaxRetryError,
     ProtocolError,
+    ProxyError,
     ReadTimeoutError,
     ResponseError,
-    InvalidHeader,
-    ProxyError,
 )
 from ..packages import six

 log = logging.getLogger(__name__)

@@ -27,8 +28,51 @@ RequestHistory = namedtuple(
 )


+# TODO: In v2 we can remove this sentinel and metaclass with deprecated options.
+_Default = object()
+
+
+class _RetryMeta(type):
+    @property
+    def DEFAULT_METHOD_WHITELIST(cls):
+        warnings.warn(
+            "Using 'Retry.DEFAULT_METHOD_WHITELIST' is deprecated and "
+            "will be removed in v2.0. Use 'Retry.DEFAULT_METHODS_ALLOWED' instead",
+            DeprecationWarning,
+        )
+        return cls.DEFAULT_ALLOWED_METHODS
+
+    @DEFAULT_METHOD_WHITELIST.setter
+    def DEFAULT_METHOD_WHITELIST(cls, value):
+        warnings.warn(
+            "Using 'Retry.DEFAULT_METHOD_WHITELIST' is deprecated and "
+            "will be removed in v2.0. Use 'Retry.DEFAULT_ALLOWED_METHODS' instead",
+            DeprecationWarning,
+        )
+        cls.DEFAULT_ALLOWED_METHODS = value
+
+    @property
+    def DEFAULT_REDIRECT_HEADERS_BLACKLIST(cls):
+        warnings.warn(
+            "Using 'Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST' is deprecated and "
+            "will be removed in v2.0. Use 'Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT' instead",
+            DeprecationWarning,
+        )
+        return cls.DEFAULT_REMOVE_HEADERS_ON_REDIRECT
+
+    @DEFAULT_REDIRECT_HEADERS_BLACKLIST.setter
+    def DEFAULT_REDIRECT_HEADERS_BLACKLIST(cls, value):
+        warnings.warn(
+            "Using 'Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST' is deprecated and "
+            "will be removed in v2.0. Use 'Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT' instead",
+            DeprecationWarning,
+        )
+        cls.DEFAULT_REMOVE_HEADERS_ON_REDIRECT = value
+
+
+@six.add_metaclass(_RetryMeta)
 class Retry(object):
-    """ Retry configuration.
+    """Retry configuration.

     Each retry attempt will create a new Retry object with updated values, so
     they can be safely reused.
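The metaclass exists purely so the old class-level attributes keep working while warning about their replacements. A small sketch of what that buys (against urllib3 1.26):

    import warnings
    from urllib3.util.retry import Retry

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        methods = Retry.DEFAULT_METHOD_WHITELIST  # deprecated spelling

    assert methods == Retry.DEFAULT_ALLOWED_METHODS
    assert caught[0].category is DeprecationWarning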
@@ -54,8 +98,7 @@ class Retry(object):
         Total number of retries to allow. Takes precedence over other counts.

         Set to ``None`` to remove this constraint and fall back on other
-        counts. It's a good idea to set this to some sensibly-high value to
-        account for unexpected edge cases and avoid infinite retry loops.
+        counts.

         Set to ``0`` to fail on the first retry.

@@ -96,18 +139,35 @@ class Retry(object):

         Set to ``0`` to fail on the first retry of this type.

-    :param iterable method_whitelist:
+    :param int other:
+        How many times to retry on other errors.
+
+        Other errors are errors that are not connect, read, redirect or status errors.
+        These errors might be raised after the request was sent to the server, so the
+        request might have side-effects.
+
+        Set to ``0`` to fail on the first retry of this type.
+
+        If ``total`` is not set, it's a good idea to set this to 0 to account
+        for unexpected edge cases and avoid infinite retry loops.
+
+    :param iterable allowed_methods:
         Set of uppercased HTTP method verbs that we should retry on.

         By default, we only retry on methods which are considered to be
         idempotent (multiple requests with the same parameters end with the
-        same state). See :attr:`Retry.DEFAULT_METHOD_WHITELIST`.
+        same state). See :attr:`Retry.DEFAULT_ALLOWED_METHODS`.

         Set to a ``False`` value to retry on any verb.

+        .. warning::
+
+            Previously this parameter was named ``method_whitelist``, that
+            usage is deprecated in v1.26.0 and will be removed in v2.0.
+
     :param iterable status_forcelist:
         A set of integer HTTP status codes that we should force a retry on.
-        A retry is initiated if the request method is in ``method_whitelist``
+        A retry is initiated if the request method is in ``allowed_methods``
         and the response status code is in ``status_forcelist``.

         By default, this is disabled with ``None``.

@@ -148,13 +208,16 @@ class Retry(object):
         request.
     """

-    DEFAULT_METHOD_WHITELIST = frozenset(
+    #: Default methods to be used for ``allowed_methods``
+    DEFAULT_ALLOWED_METHODS = frozenset(
         ["HEAD", "GET", "PUT", "DELETE", "OPTIONS", "TRACE"]
     )

+    #: Default status codes to be used for ``status_forcelist``
     RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503])

-    DEFAULT_REDIRECT_HEADERS_BLACKLIST = frozenset(["Authorization"])
+    #: Default headers to be used for ``remove_headers_on_redirect``
+    DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Authorization"])

     #: Maximum backoff time.
     BACKOFF_MAX = 120

@@ -166,20 +229,42 @@ class Retry(object):
         read=None,
         redirect=None,
         status=None,
-        method_whitelist=DEFAULT_METHOD_WHITELIST,
+        other=None,
+        allowed_methods=_Default,
         status_forcelist=None,
         backoff_factor=0,
         raise_on_redirect=True,
         raise_on_status=True,
         history=None,
         respect_retry_after_header=True,
-        remove_headers_on_redirect=DEFAULT_REDIRECT_HEADERS_BLACKLIST,
+        remove_headers_on_redirect=_Default,
+        # TODO: Deprecated, remove in v2.0
+        method_whitelist=_Default,
     ):
+
+        if method_whitelist is not _Default:
+            if allowed_methods is not _Default:
+                raise ValueError(
+                    "Using both 'allowed_methods' and "
+                    "'method_whitelist' together is not allowed. "
+                    "Instead only use 'allowed_methods'"
+                )
+            warnings.warn(
+                "Using 'method_whitelist' with Retry is deprecated and "
+                "will be removed in v2.0. Use 'allowed_methods' instead",
+                DeprecationWarning,
+            )
+            allowed_methods = method_whitelist
+        if allowed_methods is _Default:
+            allowed_methods = self.DEFAULT_ALLOWED_METHODS
+        if remove_headers_on_redirect is _Default:
+            remove_headers_on_redirect = self.DEFAULT_REMOVE_HEADERS_ON_REDIRECT
+
         self.total = total
         self.connect = connect
         self.read = read
         self.status = status
+        self.other = other

         if redirect is False or total is False:
             redirect = 0
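At the instance level the same compatibility dance happens in the constructor: ``method_whitelist`` is still accepted, mapped onto ``allowed_methods`` with a ``DeprecationWarning``, and rejected if both spellings are given. A sketch of the three cases:

    from urllib3.util.retry import Retry

    # New spelling (1.26+).
    retry = Retry(total=3, allowed_methods=["GET", "HEAD"], status_forcelist=[502, 503])

    # Old spelling still works, but emits a DeprecationWarning.
    legacy = Retry(total=3, method_whitelist=["GET", "HEAD"])

    # Mixing the two raises ValueError:
    # Retry(allowed_methods=["GET"], method_whitelist=["GET"])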
@@ -187,7 +272,7 @@ class Retry(object):

         self.redirect = redirect
         self.status_forcelist = status_forcelist or set()
-        self.method_whitelist = method_whitelist
+        self.allowed_methods = allowed_methods
         self.backoff_factor = backoff_factor
         self.raise_on_redirect = raise_on_redirect
         self.raise_on_status = raise_on_status

@@ -204,7 +289,7 @@ class Retry(object):
             read=self.read,
             redirect=self.redirect,
             status=self.status,
-            method_whitelist=self.method_whitelist,
+            other=self.other,
             status_forcelist=self.status_forcelist,
             backoff_factor=self.backoff_factor,
             raise_on_redirect=self.raise_on_redirect,

@@ -213,6 +298,23 @@ class Retry(object):
             remove_headers_on_redirect=self.remove_headers_on_redirect,
             respect_retry_after_header=self.respect_retry_after_header,
         )
+
+        # TODO: If already given in **kw we use what's given to us
+        # If not given we need to figure out what to pass. We decide
+        # based on whether our class has the 'method_whitelist' property
+        # and if so we pass the deprecated 'method_whitelist' otherwise
+        # we use 'allowed_methods'. Remove in v2.0
+        if "method_whitelist" not in kw and "allowed_methods" not in kw:
+            if "method_whitelist" in self.__dict__:
+                warnings.warn(
+                    "Using 'method_whitelist' with Retry is deprecated and "
+                    "will be removed in v2.0. Use 'allowed_methods' instead",
+                    DeprecationWarning,
+                )
+                params["method_whitelist"] = self.allowed_methods
+            else:
+                params["allowed_methods"] = self.allowed_methods
+
         params.update(kw)
         return type(self)(**params)

@@ -231,7 +333,7 @@ class Retry(object):
         return new_retries

     def get_backoff_time(self):
-        """ Formula for computing the current backoff
+        """Formula for computing the current backoff

         :rtype: float
         """

@@ -252,10 +354,17 @@ class Retry(object):
         if re.match(r"^\s*[0-9]+\s*$", retry_after):
             seconds = int(retry_after)
         else:
-            retry_date_tuple = email.utils.parsedate(retry_after)
+            retry_date_tuple = email.utils.parsedate_tz(retry_after)
             if retry_date_tuple is None:
                 raise InvalidHeader("Invalid Retry-After header: %s" % retry_after)
-            retry_date = time.mktime(retry_date_tuple)
+            if retry_date_tuple[9] is None:  # Python 2
+                # Assume UTC if no timezone was specified
+                # On Python2.7, parsedate_tz returns None for a timezone offset
+                # instead of 0 if no timezone is given, where mktime_tz treats
+                # a None timezone offset as local time.
+                retry_date_tuple = retry_date_tuple[:9] + (0,) + retry_date_tuple[10:]
+
+            retry_date = email.utils.mktime_tz(retry_date_tuple)
             seconds = retry_date - time.time()

         if seconds < 0:
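Switching from ``parsedate``/``time.mktime`` to ``parsedate_tz``/``mktime_tz`` fixes HTTP-date ``Retry-After`` values, which are always expressed in GMT: the old code interpreted them in the local timezone, skewing the sleep by the UTC offset. The stdlib calls the new code relies on look like this (the date shown is arbitrary):

    import time
    from email.utils import mktime_tz, parsedate_tz

    retry_after = "Fri, 20 Nov 2020 07:28:00 GMT"
    parsed = parsedate_tz(retry_after)          # keeps the timezone offset
    sleep_for = mktime_tz(parsed) - time.time() # how long urllib3 would sleep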
@@ -288,7 +397,7 @@ class Retry(object):
             time.sleep(backoff)

     def sleep(self, response=None):
-        """ Sleep between retry attempts.
+        """Sleep between retry attempts.

         This method will respect a server's ``Retry-After`` response header
         and sleep the duration of the time requested. If that is not present, it

@@ -304,7 +413,7 @@ class Retry(object):
         self._sleep_backoff()

     def _is_connection_error(self, err):
-        """ Errors when we're fairly sure that the server did not receive the
+        """Errors when we're fairly sure that the server did not receive the
         request, so it should be safe to retry.
         """
         if isinstance(err, ProxyError):

@@ -312,22 +421,33 @@ class Retry(object):
         return isinstance(err, ConnectTimeoutError)

     def _is_read_error(self, err):
-        """ Errors that occur after the request has been started, so we should
+        """Errors that occur after the request has been started, so we should
         assume that the server began processing it.
         """
         return isinstance(err, (ReadTimeoutError, ProtocolError))

     def _is_method_retryable(self, method):
-        """ Checks if a given HTTP method should be retried upon, depending if
-        it is included on the method whitelist.
+        """Checks if a given HTTP method should be retried upon, depending if
+        it is included in the allowed_methods
         """
-        if self.method_whitelist and method.upper() not in self.method_whitelist:
-            return False
+        # TODO: For now favor if the Retry implementation sets its own method_whitelist
+        # property outside of our constructor to avoid breaking custom implementations.
+        if "method_whitelist" in self.__dict__:
+            warnings.warn(
+                "Using 'method_whitelist' with Retry is deprecated and "
+                "will be removed in v2.0. Use 'allowed_methods' instead",
+                DeprecationWarning,
+            )
+            allowed_methods = self.method_whitelist
+        else:
+            allowed_methods = self.allowed_methods
+
+        if allowed_methods and method.upper() not in allowed_methods:
+            return False
         return True

     def is_retry(self, method, status_code, has_retry_after=False):
-        """ Is this method/status code retryable? (Based on whitelists and control
+        """Is this method/status code retryable? (Based on allowlists and control
         variables such as the number of total retries to allow, whether to
         respect the Retry-After header, whether this header is present, and
         whether the returned status code is on the list of status codes to

@@ -348,7 +468,14 @@ class Retry(object):

     def is_exhausted(self):
         """ Are we out of retries? """
-        retry_counts = (self.total, self.connect, self.read, self.redirect, self.status)
+        retry_counts = (
+            self.total,
+            self.connect,
+            self.read,
+            self.redirect,
+            self.status,
+            self.other,
+        )
         retry_counts = list(filter(None, retry_counts))
         if not retry_counts:
             return False
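``is_retry`` now consults ``allowed_methods`` (directly, or through the deprecated ``method_whitelist`` if a subclass still sets it). A sketch of the resulting behaviour:

    from urllib3.util.retry import Retry

    retry = Retry(total=3, allowed_methods=["GET"], status_forcelist=[503])
    print(retry.is_retry("GET", 503))   # True  - method allowed, status forced
    print(retry.is_retry("POST", 503))  # False - POST is not an allowed method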
@@ -364,7 +491,7 @@ class Retry(object):
         _pool=None,
         _stacktrace=None,
     ):
-        """ Return a new Retry object with incremented retry counters.
+        """Return a new Retry object with incremented retry counters.

         :param response: A response object, or None, if the server did not
             return a response.

@@ -386,6 +513,7 @@ class Retry(object):
         read = self.read
         redirect = self.redirect
         status_count = self.status
+        other = self.other
         cause = "unknown"
         status = None
         redirect_location = None

@@ -404,6 +532,11 @@ class Retry(object):
         elif read is not None:
             read -= 1

+        elif error:
+            # Other retry?
+            if other is not None:
+                other -= 1
+
         elif response and response.get_redirect_location():
             # Redirect retry?
             if redirect is not None:

@@ -414,7 +547,7 @@ class Retry(object):

         else:
             # Incrementing because of a server error like a 500 in
-            # status_forcelist and a the given method is in the whitelist
+            # status_forcelist and the given method is in the allowed_methods
             cause = ResponseError.GENERIC_ERROR
             if response and response.status:
                 if status_count is not None:

@@ -432,6 +565,7 @@ class Retry(object):
             read=read,
             redirect=redirect,
             status=status_count,
+            other=other,
             history=history,
         )

@@ -448,6 +582,20 @@ class Retry(object):
             "read={self.read}, redirect={self.redirect}, status={self.status})"
         ).format(cls=type(self), self=self)

+    def __getattr__(self, item):
+        if item == "method_whitelist":
+            # TODO: Remove this deprecated alias in v2.0
+            warnings.warn(
+                "Using 'method_whitelist' with Retry is deprecated and "
+                "will be removed in v2.0. Use 'allowed_methods' instead",
+                DeprecationWarning,
+            )
+            return self.allowed_methods
+        try:
+            return getattr(super(Retry, self), item)
+        except AttributeError:
+            return getattr(Retry, item)
+

 # For backwards compatibility (equivalent to pre-v1.9):
 Retry.DEFAULT = Retry(3)
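Two things tie this section together: the new ``other`` counter participates in ``increment``/``is_exhausted`` for errors that are neither connect nor read errors, and ``__getattr__`` keeps ``retry.method_whitelist`` readable on instances (with a warning). A sketch of the ``other`` budget being spent; the ``OSError`` merely stands in for an error raised mid-request:

    from urllib3.exceptions import MaxRetryError
    from urllib3.util.retry import Retry

    retry = Retry(total=None, connect=2, read=2, other=0)
    try:
        retry.increment(method="GET", url="/", error=OSError("write failed"))
    except MaxRetryError as exc:
        print("gave up:", exc.reason)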
src/pip/_vendor/urllib3/util/ssl_.py
@@ -1,21 +1,27 @@
 from __future__ import absolute_import
-import errno
-import warnings
-import hmac
-import sys

+import hmac
+import os
+import sys
+import warnings
 from binascii import hexlify, unhexlify
 from hashlib import md5, sha1, sha256

-from .url import IPV4_RE, BRACELESS_IPV6_ADDRZ_RE
-from ..exceptions import SSLError, InsecurePlatformWarning, SNIMissingWarning
+from ..exceptions import (
+    InsecurePlatformWarning,
+    ProxySchemeUnsupported,
+    SNIMissingWarning,
+    SSLError,
+)
 from ..packages import six
+from .url import BRACELESS_IPV6_ADDRZ_RE, IPV4_RE

 SSLContext = None
+SSLTransport = None
 HAS_SNI = False
 IS_PYOPENSSL = False
 IS_SECURETRANSPORT = False
+ALPN_PROTOCOLS = ["http/1.1"]

 # Maps the length of a digest to a possible hash function producing this digest
 HASHFUNC_MAP = {32: md5, 40: sha1, 64: sha256}

@@ -29,8 +35,8 @@ def _const_compare_digest_backport(a, b):
     Returns True if the digests match, and False otherwise.
     """
     result = abs(len(a) - len(b))
-    for l, r in zip(bytearray(a), bytearray(b)):
-        result |= l ^ r
+    for left, right in zip(bytearray(a), bytearray(b)):
+        result |= left ^ right
     return result == 0

@@ -38,11 +44,21 @@ _const_compare_digest = getattr(hmac, "compare_digest", _const_compare_digest_ba

 try:  # Test for SSL features
     import ssl
-    from ssl import wrap_socket, CERT_REQUIRED
+    from ssl import CERT_REQUIRED, wrap_socket
+except ImportError:
+    pass
+
+try:
     from ssl import HAS_SNI  # Has SNI?
 except ImportError:
     pass

+try:
+    from .ssltransport import SSLTransport
+except ImportError:
+    pass
+
+
 try:  # Platform-specific: Python 3.6
     from ssl import PROTOCOL_TLS

@@ -57,12 +73,18 @@ except ImportError:


 try:
-    from ssl import OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_COMPRESSION
+    from ssl import OP_NO_COMPRESSION, OP_NO_SSLv2, OP_NO_SSLv3
 except ImportError:
     OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000
     OP_NO_COMPRESSION = 0x20000


+try:  # OP_NO_TICKET was added in Python 3.6
+    from ssl import OP_NO_TICKET
+except ImportError:
+    OP_NO_TICKET = 0x4000
+
+
 # A secure default.
 # Sources for more information on TLS ciphers:
 #

@@ -249,7 +271,7 @@ def create_urllib3_context(
         ``ssl.CERT_REQUIRED``.
     :param options:
         Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``,
-        ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``.
+        ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``, and ``ssl.OP_NO_TICKET``.
     :param ciphers:
         Which cipher suites to allow the server to select.
     :returns:

@@ -272,6 +294,11 @@ def create_urllib3_context(
         # Disable compression to prevent CRIME attacks for OpenSSL 1.0+
         # (issue #309)
         options |= OP_NO_COMPRESSION
+        # TLSv1.2 only. Unless set explicitly, do not request tickets.
+        # This may save some bandwidth on wire, and although the ticket is encrypted,
+        # there is a risk associated with it being on wire,
+        # if the server is not rotating its ticketing keys properly.
+        options |= OP_NO_TICKET

     context.options |= options

@@ -293,6 +320,14 @@ def create_urllib3_context(
     # We do our own verification, including fingerprints and alternative
     # hostnames. So disable it here
     context.check_hostname = False
+
+    # Enable logging of TLS session keys via defacto standard environment variable
+    # 'SSLKEYLOGFILE', if the feature is available (Python 3.8+). Skip empty values.
+    if hasattr(context, "keylog_filename"):
+        sslkeylogfile = os.environ.get("SSLKEYLOGFILE")
+        if sslkeylogfile:
+            context.keylog_filename = sslkeylogfile
+
     return context
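Both additions are observable on the default context. A small sketch (the key-log path is hypothetical, the environment variable must be set before the context is created, and ``keylog_filename`` only exists on Python 3.8+):

    import os
    import ssl
    from urllib3.util.ssl_ import create_urllib3_context

    os.environ["SSLKEYLOGFILE"] = "/tmp/tls-keys.log"
    ctx = create_urllib3_context()
    print(bool(ctx.options & ssl.OP_NO_TICKET))    # True - session tickets not requested
    print(getattr(ctx, "keylog_filename", None))   # '/tmp/tls-keys.log' on 3.8+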
@@ -309,6 +344,7 @@ def ssl_wrap_socket(
     ca_cert_dir=None,
     key_password=None,
     ca_cert_data=None,
+    tls_in_tls=False,
 ):
     """
     All arguments except for server_hostname, ssl_context, and ca_cert_dir have

@@ -330,6 +366,8 @@ def ssl_wrap_socket(
     :param ca_cert_data:
         Optional string containing CA certificates in PEM format suitable for
         passing as the cadata parameter to SSLContext.load_verify_locations()
+    :param tls_in_tls:
+        Use SSLTransport to wrap the existing socket.
     """
     context = ssl_context
     if context is None:

@@ -341,14 +379,8 @@ def ssl_wrap_socket(
     if ca_certs or ca_cert_dir or ca_cert_data:
         try:
             context.load_verify_locations(ca_certs, ca_cert_dir, ca_cert_data)
-        except IOError as e:  # Platform-specific: Python 2.7
+        except (IOError, OSError) as e:
             raise SSLError(e)
-        # Py33 raises FileNotFoundError which subclasses OSError
-        # These are not equivalent unless we check the errno attribute
-        except OSError as e:  # Platform-specific: Python 3.3 and beyond
-            if e.errno == errno.ENOENT:
-                raise SSLError(e)
-            raise

     elif ssl_context is None and hasattr(context, "load_default_certs"):
         # try to load OS default certs; works well on Windows (require Python3.4+)

@@ -366,16 +398,21 @@ def ssl_wrap_socket(
     else:
         context.load_cert_chain(certfile, keyfile, key_password)

+    try:
+        if hasattr(context, "set_alpn_protocols"):
+            context.set_alpn_protocols(ALPN_PROTOCOLS)
+    except NotImplementedError:
+        pass
+
     # If we detect server_hostname is an IP address then the SNI
     # extension should not be used according to RFC3546 Section 3.1
-    # We shouldn't warn the user if SNI isn't available but we would
-    # not be using SNI anyways due to IP address for server_hostname.
-    if (
-        server_hostname is not None and not is_ipaddress(server_hostname)
-    ) or IS_SECURETRANSPORT:
-        if HAS_SNI and server_hostname is not None:
-            return context.wrap_socket(sock, server_hostname=server_hostname)
-
+    use_sni_hostname = server_hostname and not is_ipaddress(server_hostname)
+    # SecureTransport uses server_hostname in certificate verification.
+    send_sni = (use_sni_hostname and HAS_SNI) or (
+        IS_SECURETRANSPORT and server_hostname
+    )
+    # Do not warn the user if server_hostname is an invalid SNI hostname.
+    if not HAS_SNI and use_sni_hostname:
         warnings.warn(
             "An HTTPS request has been made, but the SNI (Server Name "
             "Indication) extension to TLS is not available on this platform. "

@@ -387,7 +424,13 @@ def ssl_wrap_socket(
             SNIMissingWarning,
         )

-    return context.wrap_socket(sock)
+    if send_sni:
+        ssl_sock = _ssl_wrap_socket_impl(
+            sock, context, tls_in_tls, server_hostname=server_hostname
+        )
+    else:
+        ssl_sock = _ssl_wrap_socket_impl(sock, context, tls_in_tls)
+    return ssl_sock


 def is_ipaddress(hostname):

@@ -412,3 +455,20 @@ def _is_key_file_encrypted(key_file):
                 return True

     return False
+
+
+def _ssl_wrap_socket_impl(sock, ssl_context, tls_in_tls, server_hostname=None):
+    if tls_in_tls:
+        if not SSLTransport:
+            # Import error, ssl is not available.
+            raise ProxySchemeUnsupported(
+                "TLS in TLS requires support for the 'ssl' module"
+            )
+
+        SSLTransport._validate_ssl_context_for_tls_in_tls(ssl_context)
+        return SSLTransport(sock, ssl_context, server_hostname)
+
+    if server_hostname:
+        return ssl_context.wrap_socket(sock, server_hostname=server_hostname)
+    else:
+        return ssl_context.wrap_socket(sock)
src/pip/_vendor/urllib3/util/ssltransport.py (new file, 221 lines)
@@ -0,0 +1,221 @@
+import io
+import socket
+import ssl
+
+from pip._vendor.urllib3.exceptions import ProxySchemeUnsupported
+from pip._vendor.urllib3.packages import six
+
+SSL_BLOCKSIZE = 16384
+
+
+class SSLTransport:
+    """
+    The SSLTransport wraps an existing socket and establishes an SSL connection.
+
+    Contrary to Python's implementation of SSLSocket, it allows you to chain
+    multiple TLS connections together. It's particularly useful if you need to
+    implement TLS within TLS.
+
+    The class supports most of the socket API operations.
+    """
+
+    @staticmethod
+    def _validate_ssl_context_for_tls_in_tls(ssl_context):
+        """
+        Raises a ProxySchemeUnsupported if the provided ssl_context can't be used
+        for TLS in TLS.
+
+        The only requirement is that the ssl_context provides the 'wrap_bio'
+        methods.
+        """
+
+        if not hasattr(ssl_context, "wrap_bio"):
+            if six.PY2:
+                raise ProxySchemeUnsupported(
+                    "TLS in TLS requires SSLContext.wrap_bio() which isn't "
+                    "supported on Python 2"
+                )
+            else:
+                raise ProxySchemeUnsupported(
+                    "TLS in TLS requires SSLContext.wrap_bio() which isn't "
+                    "available on non-native SSLContext"
+                )
+
+    def __init__(
+        self, socket, ssl_context, server_hostname=None, suppress_ragged_eofs=True
+    ):
+        """
+        Create an SSLTransport around socket using the provided ssl_context.
+        """
+        self.incoming = ssl.MemoryBIO()
+        self.outgoing = ssl.MemoryBIO()
+
+        self.suppress_ragged_eofs = suppress_ragged_eofs
+        self.socket = socket
+
+        self.sslobj = ssl_context.wrap_bio(
+            self.incoming, self.outgoing, server_hostname=server_hostname
+        )
+
+        # Perform initial handshake.
+        self._ssl_io_loop(self.sslobj.do_handshake)
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, *_):
+        self.close()
+
+    def fileno(self):
+        return self.socket.fileno()
+
+    def read(self, len=1024, buffer=None):
+        return self._wrap_ssl_read(len, buffer)
+
+    def recv(self, len=1024, flags=0):
+        if flags != 0:
+            raise ValueError("non-zero flags not allowed in calls to recv")
+        return self._wrap_ssl_read(len)
+
+    def recv_into(self, buffer, nbytes=None, flags=0):
+        if flags != 0:
+            raise ValueError("non-zero flags not allowed in calls to recv_into")
+        if buffer and (nbytes is None):
+            nbytes = len(buffer)
+        elif nbytes is None:
+            nbytes = 1024
+        return self.read(nbytes, buffer)
+
+    def sendall(self, data, flags=0):
+        if flags != 0:
+            raise ValueError("non-zero flags not allowed in calls to sendall")
+        count = 0
+        with memoryview(data) as view, view.cast("B") as byte_view:
+            amount = len(byte_view)
+            while count < amount:
+                v = self.send(byte_view[count:])
+                count += v
+
+    def send(self, data, flags=0):
+        if flags != 0:
+            raise ValueError("non-zero flags not allowed in calls to send")
+        response = self._ssl_io_loop(self.sslobj.write, data)
+        return response
+
+    def makefile(
+        self, mode="r", buffering=None, encoding=None, errors=None, newline=None
+    ):
+        """
+        Python's httpclient uses makefile and buffered io when reading HTTP
+        messages and we need to support it.
+
+        This is unfortunately a copy and paste of socket.py makefile with small
+        changes to point to the socket directly.
+        """
+        if not set(mode) <= {"r", "w", "b"}:
+            raise ValueError("invalid mode %r (only r, w, b allowed)" % (mode,))
+
+        writing = "w" in mode
+        reading = "r" in mode or not writing
+        assert reading or writing
+        binary = "b" in mode
+        rawmode = ""
+        if reading:
+            rawmode += "r"
+        if writing:
+            rawmode += "w"
+        raw = socket.SocketIO(self, rawmode)
+        self.socket._io_refs += 1
+        if buffering is None:
+            buffering = -1
+        if buffering < 0:
+            buffering = io.DEFAULT_BUFFER_SIZE
+        if buffering == 0:
+            if not binary:
+                raise ValueError("unbuffered streams must be binary")
+            return raw
+        if reading and writing:
+            buffer = io.BufferedRWPair(raw, raw, buffering)
+        elif reading:
+            buffer = io.BufferedReader(raw, buffering)
+        else:
+            assert writing
+            buffer = io.BufferedWriter(raw, buffering)
+        if binary:
+            return buffer
+        text = io.TextIOWrapper(buffer, encoding, errors, newline)
+        text.mode = mode
+        return text
+
+    def unwrap(self):
+        self._ssl_io_loop(self.sslobj.unwrap)
+
+    def close(self):
+        self.socket.close()
+
+    def getpeercert(self, binary_form=False):
+        return self.sslobj.getpeercert(binary_form)
+
+    def version(self):
+        return self.sslobj.version()
+
+    def cipher(self):
+        return self.sslobj.cipher()
+
+    def selected_alpn_protocol(self):
+        return self.sslobj.selected_alpn_protocol()
+
+    def selected_npn_protocol(self):
+        return self.sslobj.selected_npn_protocol()
+
+    def shared_ciphers(self):
+        return self.sslobj.shared_ciphers()
+
+    def compression(self):
+        return self.sslobj.compression()
+
+    def settimeout(self, value):
+        self.socket.settimeout(value)
+
+    def gettimeout(self):
+        return self.socket.gettimeout()
+
+    def _decref_socketios(self):
+        self.socket._decref_socketios()
+
+    def _wrap_ssl_read(self, len, buffer=None):
+        try:
+            return self._ssl_io_loop(self.sslobj.read, len, buffer)
+        except ssl.SSLError as e:
+            if e.errno == ssl.SSL_ERROR_EOF and self.suppress_ragged_eofs:
+                return 0  # eof, return 0.
+            else:
+                raise
+
+    def _ssl_io_loop(self, func, *args):
+        """ Performs an I/O loop between incoming/outgoing and the socket."""
+        should_loop = True
+        ret = None
+
+        while should_loop:
+            errno = None
+            try:
+                ret = func(*args)
+            except ssl.SSLError as e:
+                if e.errno not in (ssl.SSL_ERROR_WANT_READ, ssl.SSL_ERROR_WANT_WRITE):
+                    # WANT_READ, and WANT_WRITE are expected, others are not.
+                    raise e
+                errno = e.errno
+
+            buf = self.outgoing.read()
+            self.socket.sendall(buf)
+
+            if errno is None:
+                should_loop = False
+            elif errno == ssl.SSL_ERROR_WANT_READ:
+                buf = self.socket.recv(SSL_BLOCKSIZE)
+                if buf:
+                    self.incoming.write(buf)
+                else:
+                    self.incoming.write_eof()
+        return ret
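``SSLTransport`` is what makes HTTPS-over-HTTPS proxies possible: because it drives an ``SSLObject`` over memory BIOs, the thing it wraps only has to look like a socket, so it can itself be another TLS wrapper. A hedged sketch of a single layer, using the upstream module path (the vendored copy lives under ``pip._vendor``); chaining a second ``SSLTransport`` on top of ``tls_sock`` is how TLS-in-TLS is built:

    import socket
    import ssl

    from urllib3.util.ssltransport import SSLTransport

    ctx = ssl.create_default_context()
    raw = socket.create_connection(("example.com", 443))
    tls_sock = SSLTransport(raw, ctx, server_hostname="example.com")

    tls_sock.sendall(b"HEAD / HTTP/1.1\r\nHost: example.com\r\n\r\n")
    print(tls_sock.recv(64))
    tls_sock.close()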
src/pip/_vendor/urllib3/util/timeout.py
@@ -1,9 +1,10 @@
 from __future__ import absolute_import
+
+import time
+
 # The default socket timeout, used by httplib to indicate that no timeout was
 # specified by the user
 from socket import _GLOBAL_DEFAULT_TIMEOUT
-import time

 from ..exceptions import TimeoutStateError

@@ -17,22 +18,28 @@ current_time = getattr(time, "monotonic", time.time)


 class Timeout(object):
-    """ Timeout configuration.
+    """Timeout configuration.

-    Timeouts can be defined as a default for a pool::
+    Timeouts can be defined as a default for a pool:
+
+    .. code-block:: python

         timeout = Timeout(connect=2.0, read=7.0)
         http = PoolManager(timeout=timeout)
         response = http.request('GET', 'http://example.com/')

-    Or per-request (which overrides the default for the pool)::
+    Or per-request (which overrides the default for the pool):
+
+    .. code-block:: python

         response = http.request('GET', 'http://example.com/', timeout=Timeout(10))

-    Timeouts can be disabled by setting all the parameters to ``None``::
+    Timeouts can be disabled by setting all the parameters to ``None``:
+
+    .. code-block:: python

         no_timeout = Timeout(connect=None, read=None)
         response = http.request('GET', 'http://example.com/, timeout=no_timeout)


     :param total:

@@ -43,7 +50,7 @@ class Timeout(object):

         Defaults to None.

-    :type total: integer, float, or None
+    :type total: int, float, or None

     :param connect:
         The maximum amount of time (in seconds) to wait for a connection

@@ -53,7 +60,7 @@ class Timeout(object):
         <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
         None will set an infinite timeout for connection attempts.

-    :type connect: integer, float, or None
+    :type connect: int, float, or None

     :param read:
         The maximum amount of time (in seconds) to wait between consecutive

@@ -63,7 +70,7 @@ class Timeout(object):
         <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
         None will set an infinite timeout.

-    :type read: integer, float, or None
+    :type read: int, float, or None

     .. note::

@@ -111,7 +118,7 @@ class Timeout(object):

     @classmethod
     def _validate_timeout(cls, value, name):
-        """ Check that a timeout attribute is valid.
+        """Check that a timeout attribute is valid.

         :param value: The timeout value to validate
         :param name: The name of the timeout attribute to validate. This is

@@ -157,7 +164,7 @@ class Timeout(object):

     @classmethod
     def from_float(cls, timeout):
-        """ Create a new Timeout from a legacy timeout value.
+        """Create a new Timeout from a legacy timeout value.

         The timeout value used by httplib.py sets the same timeout on the
         connect(), and recv() socket requests. This creates a :class:`Timeout`

@@ -172,7 +179,7 @@ class Timeout(object):
         return Timeout(read=timeout, connect=timeout)

     def clone(self):
-        """ Create a copy of the timeout object
+        """Create a copy of the timeout object

         Timeout properties are stored per-pool but each request needs a fresh
         Timeout object to ensure each one has its own start/stop configured.

@@ -186,7 +193,7 @@ class Timeout(object):
         return Timeout(connect=self._connect, read=self._read, total=self.total)

     def start_connect(self):
-        """ Start the timeout clock, used during a connect() attempt
+        """Start the timeout clock, used during a connect() attempt

         :raises urllib3.exceptions.TimeoutStateError: if you attempt
             to start a timer that has been started already.

@@ -197,7 +204,7 @@ class Timeout(object):
         return self._start_connect

     def get_connect_duration(self):
-        """ Gets the time elapsed since the call to :meth:`start_connect`.
+        """Gets the time elapsed since the call to :meth:`start_connect`.

         :return: Elapsed time in seconds.
         :rtype: float

@@ -212,7 +219,7 @@ class Timeout(object):

     @property
     def connect_timeout(self):
-        """ Get the value to use when setting a connection timeout.
+        """Get the value to use when setting a connection timeout.

         This will be a positive float or integer, the value None
         (never timeout), or the default system timeout.

@@ -230,7 +237,7 @@ class Timeout(object):

     @property
     def read_timeout(self):
-        """ Get the value for the read timeout.
+        """Get the value for the read timeout.

         This assumes some time has elapsed in the connection timeout and
         computes the read timeout appropriately.
src/pip/_vendor/urllib3/util/url.py
@@ -1,11 +1,11 @@
 from __future__ import absolute_import
+
 import re
 from collections import namedtuple

 from ..exceptions import LocationParseError
 from ..packages import six
-

 url_attrs = ["scheme", "auth", "host", "port", "path", "query", "fragment"]

 # We only want to normalize urls with an HTTP(S) scheme.
src/pip/_vendor/urllib3/util/wait.py
@@ -1,7 +1,7 @@
 import errno
-from functools import partial
 import select
 import sys
+from functools import partial

 try:
     from time import monotonic

@@ -140,14 +140,14 @@ def wait_for_socket(*args, **kwargs):


 def wait_for_read(sock, timeout=None):
-    """ Waits for reading to be available on a given socket.
+    """Waits for reading to be available on a given socket.
     Returns True if the socket is readable, or False if the timeout expired.
     """
     return wait_for_socket(sock, read=True, timeout=timeout)


 def wait_for_write(sock, timeout=None):
-    """ Waits for writing to be available on a given socket.
+    """Waits for writing to be available on a given socket.
     Returns True if the socket is readable, or False if the timeout expired.
     """
     return wait_for_socket(sock, write=True, timeout=timeout)
src/pip/_vendor/vendor.txt
@@ -15,7 +15,7 @@ requests==2.25.0
     certifi==2020.11.8
     chardet==3.0.4
     idna==2.10
-    urllib3==1.25.9
+    urllib3==1.26.2
 resolvelib==0.4.0
 retrying==1.3.3
 setuptools==44.0.0