
Update urllib3 1.26.0.dev0 (41eae64) → 1.25.11 (5eb604f).

tags/release_0.25.1
JackDandy 5 years ago
parent commit 1cac775e42
  1. CHANGES.md (3)
  2. lib/urllib3/__init__.py (18)
  3. lib/urllib3/_collections.py (9)
  4. lib/urllib3/_version.py (2)
  5. lib/urllib3/connection.py (121)
  6. lib/urllib3/connectionpool.py (84)
  7. lib/urllib3/contrib/_securetransport/bindings.py (18)
  8. lib/urllib3/contrib/_securetransport/low_level.py (29)
  9. lib/urllib3/contrib/appengine.py (8)
  10. lib/urllib3/contrib/ntlmpool.py (2)
  11. lib/urllib3/contrib/pyopenssl.py (6)
  12. lib/urllib3/contrib/securetransport.py (47)
  13. lib/urllib3/contrib/socks.py (4)
  14. lib/urllib3/exceptions.py (1)
  15. lib/urllib3/exceptions.pyi (2)
  16. lib/urllib3/fields.py (1)
  17. lib/urllib3/filepost.py (4)
  18. lib/urllib3/packages/backports/makefile.py (1)
  19. lib/urllib3/packages/ssl_match_hostname/__init__.py (5)
  20. lib/urllib3/poolmanager.py (103)
  21. lib/urllib3/request.py (1)
  22. lib/urllib3/response.py (27)
  23. lib/urllib3/util/__init__.py (18)
  24. lib/urllib3/util/connection.py (14)
  25. lib/urllib3/util/proxy.py (56)
  26. lib/urllib3/util/queue.py (1)
  27. lib/urllib3/util/request.py (14)
  28. lib/urllib3/util/response.py (5)
  29. lib/urllib3/util/retry.py (153)
  30. lib/urllib3/util/ssl_.py (66)
  31. lib/urllib3/util/ssltransport.py (29)
  32. lib/urllib3/util/timeout.py (3)
  33. lib/urllib3/util/url.py (2)
  34. lib/urllib3/util/wait.py (2)

CHANGES.md (3)

@@ -60,7 +60,7 @@
 * Update soupsieve_py3 2.0.0.final (e66c311) to 2.0.2.dev (05086ef)
 * Update soupsieve_py2 backport
 * Update tmdbsimple 2.2.6 (310d933) to 2.6.6 (679e343)
-* Update urllib3 1.25.9 (a5a45dc) to 1.26.0.dev0 (41eae64)
+* Update urllib3 1.25.9 (a5a45dc) to 1.25.11 (5eb604f)
 * Change add remove duplicates in newznab provider list based on name and url
 * Change remove old provider dupe cleanup
@@ -71,6 +71,7 @@
 * Update Beautiful Soup 4.8.2 (r559) to 4.9.1 (r585)
 * Update Requests library 2.24.0 (967a05b) to 2.24.0 (2f70990)
 * Fix update Soupsieve (05086ef) broke MC and TVC browse cards
+* Update urllib3 1.26.0.dev0 (41eae64) to 1.25.11 (5eb604f)
 * Change move scantree to sg_helpers to prevent circular reference of it ever again
 * Update included fallback timezone info file to 2020c
 * Change use natural time for age if now is night time and the last recent is yesterday evening, otherwise output age as 'yesterday'

lib/urllib3/__init__.py (18)

@@ -2,24 +2,22 @@
 Python HTTP library with thread-safe connection pooling, file post support, user friendly, and more
 """
 from __future__ import absolute_import
-import warnings
 
-from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, connection_from_url
+# Set default logging handler to avoid "No handler found" warnings.
+import logging
+import warnings
+from logging import NullHandler
 
 from . import exceptions
+from ._version import __version__
+from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, connection_from_url
 from .filepost import encode_multipart_formdata
 from .poolmanager import PoolManager, ProxyManager, proxy_from_url
 from .response import HTTPResponse
 from .util.request import make_headers
-from .util.url import get_host
-from .util.timeout import Timeout
 from .util.retry import Retry
-from ._version import __version__
-
-# Set default logging handler to avoid "No handler found" warnings.
-import logging
-from logging import NullHandler
+from .util.timeout import Timeout
+from .util.url import get_host
 
 __author__ = "Andrey Petrov (andrey.petrov@shazow.net)"
 __license__ = "MIT"

lib/urllib3/_collections.py (9)

@@ -17,9 +17,10 @@ except ImportError:  # Platform-specific: No threads available
 from collections import OrderedDict
 
-from .exceptions import InvalidHeader
-from .packages.six import ensure_str, iterkeys, itervalues, PY3
+from .exceptions import InvalidHeader
+from .packages import six
+from .packages.six import iterkeys, itervalues
 
 __all__ = ["RecentlyUsedContainer", "HTTPHeaderDict"]
@@ -154,7 +155,7 @@ class HTTPHeaderDict(MutableMapping):
     def __getitem__(self, key):
         val = self._container[key.lower()]
-        return ", ".join([ensure_str(v, "ascii") for v in val[1:]])
+        return ", ".join(val[1:])
 
     def __delitem__(self, key):
         del self._container[key.lower()]
@@ -174,7 +175,7 @@ class HTTPHeaderDict(MutableMapping):
     def __ne__(self, other):
         return not self.__eq__(other)
 
-    if not PY3:  # Python 2
+    if six.PY2:  # Python 2
         iterkeys = MutableMapping.iterkeys
         itervalues = MutableMapping.itervalues
lib/urllib3/_version.py (2)

@@ -1,2 +1,2 @@
 # This file is protected via CODEOWNERS
-__version__ = "1.25.9"
+__version__ = "1.25.11"

lib/urllib3/connection.py (121)

@@ -1,14 +1,18 @@
 from __future__ import absolute_import
-import re
+
 import datetime
 import logging
 import os
+import re
 import socket
-from socket import error as SocketError, timeout as SocketTimeout
 import warnings
+from socket import error as SocketError
+from socket import timeout as SocketTimeout
 
 from .packages import six
 from .packages.six.moves.http_client import HTTPConnection as _HTTPConnection
 from .packages.six.moves.http_client import HTTPException  # noqa: F401
+from .util.proxy import create_proxy_ssl_context
 
 try:  # Compiled with SSL?
     import ssl
@@ -39,28 +43,23 @@ except NameError:  # Python 2:
     pass
 
+from ._version import __version__
 from .exceptions import (
-    NewConnectionError,
     ConnectTimeoutError,
+    NewConnectionError,
     SubjectAltNameWarning,
     SystemTimeWarning,
 )
-from .packages.ssl_match_hostname import match_hostname, CertificateError
+from .packages.ssl_match_hostname import CertificateError, match_hostname
+from .util import SKIP_HEADER, SKIPPABLE_HEADERS, connection
 from .util.ssl_ import (
-    resolve_cert_reqs,
-    resolve_ssl_version,
     assert_fingerprint,
     create_urllib3_context,
+    resolve_cert_reqs,
+    resolve_ssl_version,
     ssl_wrap_socket,
 )
-from .util import connection, SUPPRESS_USER_AGENT
-from ._collections import HTTPHeaderDict
-from ._version import __version__
 
 log = logging.getLogger(__name__)
 
 port_by_scheme = {"http": 80, "https": 443}
@@ -117,6 +116,11 @@ class HTTPConnection(_HTTPConnection, object):
         #: The socket options provided by the user. If no options are
         #: provided, we use the default options.
         self.socket_options = kw.pop("socket_options", self.default_socket_options)
+
+        # Proxy options provided by the user.
+        self.proxy = kw.pop("proxy", None)
+        self.proxy_config = kw.pop("proxy_config", None)
+
         _HTTPConnection.__init__(self, *args, **kw)
 
     @property
@@ -208,12 +212,24 @@
         return _HTTPConnection.putrequest(self, method, url, *args, **kwargs)
 
+    def putheader(self, header, *values):
+        """"""
+        if SKIP_HEADER not in values:
+            _HTTPConnection.putheader(self, header, *values)
+        elif six.ensure_str(header.lower()) not in SKIPPABLE_HEADERS:
+            raise ValueError(
+                "urllib3.util.SKIP_HEADER only supports '%s'"
+                % ("', '".join(map(str.title, sorted(SKIPPABLE_HEADERS))),)
+            )
+
     def request(self, method, url, body=None, headers=None):
-        headers = HTTPHeaderDict(headers if headers is not None else {})
-        if "user-agent" not in headers:
+        if headers is None:
+            headers = {}
+        else:
+            # Avoid modifying the headers passed into .request()
+            headers = headers.copy()
+        if "user-agent" not in (k.lower() for k in headers):
             headers["User-Agent"] = _get_default_user_agent()
-        elif headers["user-agent"] == SUPPRESS_USER_AGENT:
-            del headers["user-agent"]
         super(HTTPConnection, self).request(method, url, body=body, headers=headers)
 
     def request_chunked(self, method, url, body=None, headers=None):
@@ -221,16 +237,15 @@
         Alternative to the common request method, which sends the
         body with chunked encoding and not as one block
         """
-        headers = HTTPHeaderDict(headers if headers is not None else {})
-        skip_accept_encoding = "accept-encoding" in headers
-        skip_host = "host" in headers
+        headers = headers or {}
+        header_keys = set([six.ensure_str(k.lower()) for k in headers])
+        skip_accept_encoding = "accept-encoding" in header_keys
+        skip_host = "host" in header_keys
         self.putrequest(
             method, url, skip_accept_encoding=skip_accept_encoding, skip_host=skip_host
         )
-        if "user-agent" not in headers:
-            headers["User-Agent"] = _get_default_user_agent()
-        elif headers["user-agent"] == SUPPRESS_USER_AGENT:
-            del headers["user-agent"]
+        if "user-agent" not in header_keys:
+            self.putheader("User-Agent", _get_default_user_agent())
         for header, value in headers.items():
             self.putheader(header, value)
         if "transfer-encoding" not in headers:
@@ -271,6 +286,7 @@ class HTTPSConnection(HTTPConnection):
     ca_cert_data = None
     ssl_version = None
     assert_fingerprint = None
+    tls_in_tls_required = False
 
     def __init__(
         self,
@@ -335,8 +351,13 @@
         # Add certificate verification
         conn = self._new_conn()
         hostname = self.host
+        tls_in_tls = False
 
         if self._is_using_tunnel():
+            if self.tls_in_tls_required:
+                conn = self._connect_tls_proxy(hostname, conn)
+                tls_in_tls = True
+
             self.sock = conn
 
             # Calls self._set_hostport(), so self.host is
@@ -396,8 +417,26 @@
             ca_cert_data=self.ca_cert_data,
             server_hostname=server_hostname,
             ssl_context=context,
+            tls_in_tls=tls_in_tls,
         )
 
+        # If we're using all defaults and the connection
+        # is TLSv1 or TLSv1.1 we throw a DeprecationWarning
+        # for the host.
+        if (
+            default_ssl_context
+            and self.ssl_version is None
+            and hasattr(self.sock, "version")
+            and self.sock.version() in {"TLSv1", "TLSv1.1"}
+        ):
+            warnings.warn(
+                "Negotiating TLSv1/TLSv1.1 by default is deprecated "
+                "and will be disabled in urllib3 v2.0.0. Connecting to "
+                "'%s' with '%s' can be enabled by explicitly opting-in "
+                "with 'ssl_version'" % (self.host, self.sock.version()),
+                DeprecationWarning,
+            )
+
         if self.assert_fingerprint:
             assert_fingerprint(
                 self.sock.getpeercert(binary_form=True), self.assert_fingerprint
@@ -428,6 +467,40 @@
             or self.assert_fingerprint is not None
         )
 
+    def _connect_tls_proxy(self, hostname, conn):
+        """
+        Establish a TLS connection to the proxy using the provided SSL context.
+        """
+        proxy_config = self.proxy_config
+        ssl_context = proxy_config.ssl_context
+        if ssl_context:
+            # If the user provided a proxy context, we assume CA and client
+            # certificates have already been set
+            return ssl_wrap_socket(
+                sock=conn,
+                server_hostname=hostname,
+                ssl_context=ssl_context,
+            )
+
+        ssl_context = create_proxy_ssl_context(
+            self.ssl_version,
+            self.cert_reqs,
+            self.ca_certs,
+            self.ca_cert_dir,
+            self.ca_cert_data,
+        )
+        # If no cert was provided, use only the default options for server
+        # certificate validation
+        return ssl_wrap_socket(
+            sock=conn,
+            ca_certs=self.ca_certs,
+            ca_cert_dir=self.ca_cert_dir,
+            ca_cert_data=self.ca_cert_data,
+            server_hostname=hostname,
+            ssl_context=ssl_context,
+        )
+
 
 def _match_hostname(cert, asserted_hostname):
     try:
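One practical consequence of the new warning above: it only fires when TLSv1/TLSv1.1 is negotiated with an all-default SSL configuration. A minimal sketch of opting in explicitly so the warning is not raised (assuming the vendored urllib3 is importable as urllib3; the host name is a placeholder):

    import ssl
    import urllib3

    # Passing an explicit ssl_version means self.ssl_version is no longer None,
    # so the TLSv1/TLSv1.1 DeprecationWarning shown above is not emitted.
    http = urllib3.PoolManager(ssl_version=ssl.PROTOCOL_TLSv1_1)
    r = http.request("GET", "https://legacy-tls.example/")  # hypothetical legacy host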

lib/urllib3/connectionpool.py (84)

@@ -1,58 +1,53 @@
 from __future__ import absolute_import
 
 import errno
 import logging
+import socket
 import sys
 import warnings
-from socket import error as SocketError, timeout as SocketTimeout
-import socket
+from socket import error as SocketError
+from socket import timeout as SocketTimeout
 
+from .connection import (
+    BaseSSLError,
+    BrokenPipeError,
+    DummyConnection,
+    HTTPConnection,
+    HTTPException,
+    HTTPSConnection,
+    VerifiedHTTPSConnection,
+    port_by_scheme,
+)
 from .exceptions import (
     ClosedPoolError,
-    ProtocolError,
     EmptyPoolError,
     HeaderParsingError,
     HostChangedError,
+    InsecureRequestWarning,
     LocationValueError,
     MaxRetryError,
+    NewConnectionError,
+    ProtocolError,
     ProxyError,
     ReadTimeoutError,
     SSLError,
     TimeoutError,
-    InsecureRequestWarning,
-    NewConnectionError,
 )
-from .packages.ssl_match_hostname import CertificateError
 from .packages import six
 from .packages.six.moves import queue
-from .connection import (
-    port_by_scheme,
-    DummyConnection,
-    HTTPConnection,
-    HTTPSConnection,
-    VerifiedHTTPSConnection,
-    HTTPException,
-    BaseSSLError,
-    BrokenPipeError,
-)
+from .packages.ssl_match_hostname import CertificateError
 from .request import RequestMethods
 from .response import HTTPResponse
 from .util.connection import is_connection_dropped
+from .util.proxy import connection_requires_http_tunnel
+from .util.queue import LifoQueue
 from .util.request import set_file_position
 from .util.response import assert_header_parsing
 from .util.retry import Retry
 from .util.timeout import Timeout
-from .util.url import (
-    get_host,
-    parse_url,
-    Url,
-    _normalize_host as normalize_host,
-    _encode_target,
-)
-from .util.queue import LifoQueue
+from .util.url import Url, _encode_target
+from .util.url import _normalize_host as normalize_host
+from .util.url import get_host, parse_url
 
 xrange = six.moves.xrange
@@ -182,6 +177,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
         retries=None,
         _proxy=None,
         _proxy_headers=None,
+        _proxy_config=None,
         **conn_kw
     ):
         ConnectionPool.__init__(self, host, port)
@@ -203,6 +199,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
         self.proxy = _proxy
         self.proxy_headers = _proxy_headers or {}
+        self.proxy_config = _proxy_config
 
         # Fill the queue up so that doing get() on it will block properly
         for _ in xrange(maxsize):
@@ -219,6 +216,9 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
         # list.
         self.conn_kw.setdefault("socket_options", [])
 
+        self.conn_kw["proxy"] = self.proxy
+        self.conn_kw["proxy_config"] = self.proxy_config
+
     def _new_conn(self):
         """
         Return a fresh :class:`HTTPConnection`.
@@ -551,10 +551,12 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
         :param method:
             HTTP request method (such as GET, POST, PUT, etc.)
 
+        :param url:
+            The URL to perform the request on.
+
         :param body:
-            Data to send in the request body (useful for creating
-            POST requests, see HTTPConnectionPool.post_url for
-            more convenience).
+            Data to send in the request body, either :class:`str`, :class:`bytes`,
+            an iterable of :class:`str`/:class:`bytes`, or a file-like object.
 
         :param headers:
             Dictionary of custom headers to send, such as User-Agent,
@@ -621,6 +623,10 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
         Additional parameters are passed to
         :meth:`urllib3.response.HTTPResponse.from_httplib`
         """
+
+        parsed_url = parse_url(url)
+        destination_scheme = parsed_url.scheme
+
         if headers is None:
             headers = self.headers
@@ -638,7 +644,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
         if url.startswith("/"):
             url = six.ensure_str(_encode_target(url))
         else:
-            url = six.ensure_str(parse_url(url).url)
+            url = six.ensure_str(parsed_url.url)
 
         conn = None
@@ -653,10 +659,14 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
         # [1] <https://github.com/urllib3/urllib3/issues/651>
         release_this_conn = release_conn
 
+        http_tunnel_required = connection_requires_http_tunnel(
+            self.proxy, self.proxy_config, destination_scheme
+        )
+
         # Merge the proxy headers. Only done when not using HTTP CONNECT. We
         # have to copy the headers dict so we can safely change it without those
         # changes being reflected in anyone else's copy.
-        if self.scheme == "http" or (self.proxy and self.proxy.scheme == "https"):
+        if not http_tunnel_required:
             headers = headers.copy()
             headers.update(self.proxy_headers)
@@ -682,7 +692,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
             is_new_proxy_conn = self.proxy is not None and not getattr(
                 conn, "sock", None
             )
-            if is_new_proxy_conn:
+            if is_new_proxy_conn and http_tunnel_required:
                 self._prepare_proxy(conn)
 
             # Make the request on the httplib connection object.
@@ -946,8 +956,10 @@ class HTTPSConnectionPool(HTTPConnectionPool):
         improperly set Host: header to proxy's IP:port.
         """
 
-        if self.proxy.scheme != "https":
-            conn.set_tunnel(self._proxy_host, self.port, self.proxy_headers)
+        conn.set_tunnel(self._proxy_host, self.port, self.proxy_headers)
+
+        if self.proxy.scheme == "https":
+            conn.tls_in_tls_required = True
 
         conn.connect()
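The clarified urlopen docstring above enumerates the accepted body types; a short sketch of each form (assuming the vendored urllib3 is importable as urllib3; the target host and paths are placeholders):

    import io
    import urllib3

    pool = urllib3.HTTPSConnectionPool("httpbin.example", port=443)  # hypothetical host

    # str or bytes body
    pool.urlopen("POST", "/post", body=b'{"k": "v"}',
                 headers={"Content-Type": "application/json"})

    # an iterable of bytes, sent with chunked transfer-encoding
    pool.urlopen("POST", "/post", body=iter([b"part1", b"part2"]), chunked=True)

    # a file-like object
    pool.urlopen("POST", "/post", body=io.BytesIO(b"payload"))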

lib/urllib3/contrib/_securetransport/bindings.py (18)

@@ -32,21 +32,23 @@ license and by oscrypto's:
 from __future__ import absolute_import
 
 import platform
-from ctypes.util import find_library
 from ctypes import (
-    c_void_p,
-    c_int32,
+    CDLL,
+    CFUNCTYPE,
+    POINTER,
+    c_bool,
+    c_byte,
     c_char_p,
+    c_int32,
+    c_long,
     c_size_t,
-    c_byte,
     c_uint32,
     c_ulong,
-    c_long,
-    c_bool,
+    c_void_p,
 )
-from ctypes import CDLL, POINTER, CFUNCTYPE
+from ctypes.util import find_library
 
-from urllib3.packages.six import raise_from
+from urllib3.packages.six import raise_from
 
 if platform.system() != "Darwin":
     raise ImportError("Only macOS is supported")

lib/urllib3/contrib/_securetransport/low_level.py (29)

@@ -10,13 +10,13 @@ appropriate and useful assistance to the higher-level code.
 import base64
 import ctypes
 import itertools
-import re
 import os
+import re
 import ssl
+import struct
 import tempfile
 
-from .bindings import Security, CoreFoundation, CFConst
+from .bindings import CFConst, CoreFoundation, Security
 
 # This regular expression is used to grab PEM data out of a PEM bundle.
 _PEM_CERTS_RE = re.compile(
@@ -371,3 +371,26 @@ def _load_client_cert_chain(keychain, *paths):
     finally:
         for obj in itertools.chain(identities, certificates):
             CoreFoundation.CFRelease(obj)
+
+
+TLS_PROTOCOL_VERSIONS = {
+    "SSLv2": (0, 2),
+    "SSLv3": (3, 0),
+    "TLSv1": (3, 1),
+    "TLSv1.1": (3, 2),
+    "TLSv1.2": (3, 3),
+}
+
+
+def _build_tls_unknown_ca_alert(version):
+    """
+    Builds a TLS alert record for an unknown CA.
+    """
+    ver_maj, ver_min = TLS_PROTOCOL_VERSIONS[version]
+    severity_fatal = 0x02
+    description_unknown_ca = 0x30
+    msg = struct.pack(">BB", severity_fatal, description_unknown_ca)
+    msg_len = len(msg)
+    record_type_alert = 0x15
+    record = struct.pack(">BBBH", record_type_alert, ver_maj, ver_min, msg_len) + msg
+    return record
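As a sanity check on the record layout built above, the alert is a fixed 7-byte TLS record; a small sketch (not part of the commit) for the TLSv1.2 case:

    import struct

    severity_fatal, description_unknown_ca = 0x02, 0x30
    msg = struct.pack(">BB", severity_fatal, description_unknown_ca)
    record = struct.pack(">BBBH", 0x15, 3, 3, len(msg)) + msg  # alert record, TLS 1.2
    assert record == b"\x15\x03\x03\x00\x02\x02\x30"  # type, version, length 2, fatal unknown_ca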

lib/urllib3/contrib/appengine.py (8)

@@ -39,24 +39,24 @@ urllib3 on Google App Engine:
 """
 
 from __future__ import absolute_import
 
 import io
 import logging
 import warnings
-from ..packages.six.moves.urllib.parse import urljoin
 
 from ..exceptions import (
     HTTPError,
     HTTPWarning,
     MaxRetryError,
     ProtocolError,
-    TimeoutError,
     SSLError,
+    TimeoutError,
 )
+from ..packages.six.moves.urllib.parse import urljoin
 from ..request import RequestMethods
 from ..response import HTTPResponse
-from ..util.timeout import Timeout
 from ..util.retry import Retry
+from ..util.timeout import Timeout
 from . import _appengine_environ
 
 try:

lib/urllib3/contrib/ntlmpool.py (2)

@@ -6,12 +6,12 @@ Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10
 from __future__ import absolute_import
 
 from logging import getLogger
 
 from ntlm import ntlm
 
 from .. import HTTPSConnectionPool
 from ..packages.six.moves.http_client import HTTPSConnection
 
 log = getLogger(__name__)

lib/urllib3/contrib/pyopenssl.py (6)

@@ -60,8 +60,9 @@ except ImportError:
     pass
 
-from socket import timeout, error as SocketError
 from io import BytesIO
+from socket import error as SocketError
+from socket import timeout
 
 try:  # Platform-specific: Python 2
     from socket import _fileobject
@@ -71,11 +72,10 @@ except ImportError:  # Platform-specific: Python 3
 
 import logging
 import ssl
-from ..packages import six
 import sys
 
 from .. import util
+from ..packages import six
 
 __all__ = ["inject_into_urllib3", "extract_from_urllib3"]

lib/urllib3/contrib/securetransport.py (47)

@@ -58,20 +58,23 @@ import ctypes
 import errno
 import os.path
 import shutil
-import six
 import socket
 import ssl
+import struct
 import threading
 import weakref
 
+import six
+
 from .. import util
-from ._securetransport.bindings import Security, SecurityConst, CoreFoundation
+from ._securetransport.bindings import CoreFoundation, Security, SecurityConst
 from ._securetransport.low_level import (
     _assert_no_error,
+    _build_tls_unknown_ca_alert,
     _cert_array_from_pem,
-    _temporary_keychain,
-    _load_client_cert_chain,
     _create_cfstring_array,
+    _load_client_cert_chain,
+    _temporary_keychain,
 )
 
 try:  # Platform-specific: Python 2
@@ -396,11 +399,37 @@ class WrappedSocket(object):
         Called when we have set custom validation. We do this in two cases:
         first, when cert validation is entirely disabled; and second, when
         using a custom trust DB.
+        Raises an SSLError if the connection is not trusted.
         """
         # If we disabled cert validation, just say: cool.
         if not verify:
             return
 
+        successes = (
+            SecurityConst.kSecTrustResultUnspecified,
+            SecurityConst.kSecTrustResultProceed,
+        )
+        try:
+            trust_result = self._evaluate_trust(trust_bundle)
+            if trust_result in successes:
+                return
+            reason = "error code: %d" % (trust_result,)
+        except Exception as e:
+            # Do not trust on error
+            reason = "exception: %r" % (e,)
+
+        # SecureTransport does not send an alert nor shuts down the connection.
+        rec = _build_tls_unknown_ca_alert(self.version())
+        self.socket.sendall(rec)
+        # close the connection immediately
+        # l_onoff = 1, activate linger
+        # l_linger = 0, linger for 0 seoncds
+        opts = struct.pack("ii", 1, 0)
+        self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, opts)
+        self.close()
+        raise ssl.SSLError("certificate verify failed, %s" % reason)
+
+    def _evaluate_trust(self, trust_bundle):
         # We want data in memory, so load it up.
         if os.path.isfile(trust_bundle):
             with open(trust_bundle, "rb") as f:
@@ -438,15 +467,7 @@
             if cert_array is not None:
                 CoreFoundation.CFRelease(cert_array)
 
-        # Ok, now we can look at what the result was.
-        successes = (
-            SecurityConst.kSecTrustResultUnspecified,
-            SecurityConst.kSecTrustResultProceed,
-        )
-        if trust_result.value not in successes:
-            raise ssl.SSLError(
-                "certificate verify failed, error code: %d" % trust_result.value
-            )
+        return trust_result.value
 
     def handshake(
         self,

lib/urllib3/contrib/socks.py (4)

@@ -44,6 +44,7 @@ try:
     import socks
 except ImportError:
     import warnings
+
     from ..exceptions import DependencyWarning
 
     warnings.warn(
@@ -56,7 +57,8 @@ except ImportError:
     )
     raise
 
-from socket import error as SocketError, timeout as SocketTimeout
+from socket import error as SocketError
+from socket import timeout as SocketTimeout
 
 from ..connection import HTTPConnection, HTTPSConnection
 from ..connectionpool import HTTPConnectionPool, HTTPSConnectionPool

lib/urllib3/exceptions.py (1)

@@ -1,4 +1,5 @@
 from __future__ import absolute_import
+
 from .packages.six.moves.http_client import IncompleteRead as httplib_IncompleteRead
 
 # Base Exceptions

lib/urllib3/exceptions.pyi (2)

@@ -1,4 +1,4 @@
-from typing import Any, Optional, Union, Tuple, TYPE_CHECKING
+from typing import TYPE_CHECKING, Any, Optional, Tuple, Union
 
 if TYPE_CHECKING:
     from urllib3.connectionpool import ConnectionPool

lib/urllib3/fields.py (1)

@@ -1,4 +1,5 @@
 from __future__ import absolute_import
+
 import email.utils
 import mimetypes
 import re

lib/urllib3/filepost.py (4)

@@ -1,13 +1,13 @@
 from __future__ import absolute_import
 
 import binascii
 import codecs
 import os
 from io import BytesIO
 
+from .fields import RequestField
 from .packages import six
 from .packages.six import b
-from .fields import RequestField
 
 writer = codecs.lookup("utf-8")[3]

lib/urllib3/packages/backports/makefile.py (1)

@@ -7,7 +7,6 @@ Backports the Python 3 ``socket.makefile`` method for use with anything that
 wants to create a "fake" socket object.
 """
 import io
-
 from socket import SocketIO

lib/urllib3/packages/ssl_match_hostname/__init__.py (5)

@@ -10,7 +10,10 @@ try:
 except ImportError:
     try:
         # Backport of the function from a pypi module
-        from backports.ssl_match_hostname import CertificateError, match_hostname  # type: ignore
+        from backports.ssl_match_hostname import (  # type: ignore
+            CertificateError,
+            match_hostname,
+        )
     except ImportError:
         # Our vendored copy
         from ._implementation import CertificateError, match_hostname  # type: ignore

lib/urllib3/poolmanager.py (103)

@@ -1,15 +1,12 @@
 from __future__ import absolute_import
 
 import collections
 import functools
 import logging
-import warnings
 
 from ._collections import RecentlyUsedContainer
-from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool
-from .connectionpool import port_by_scheme
+from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, port_by_scheme
 from .exceptions import (
-    HTTPWarning,
     LocationValueError,
     MaxRetryError,
     ProxySchemeUnknown,
@@ -19,19 +16,13 @@ from .exceptions import (
 from .packages import six
 from .packages.six.moves.urllib.parse import urljoin
 from .request import RequestMethods
-from .util.url import parse_url
+from .util.proxy import connection_requires_http_tunnel
 from .util.retry import Retry
+from .util.url import parse_url
 
 __all__ = ["PoolManager", "ProxyManager", "proxy_from_url"]
 
-
-class InvalidProxyConfigurationWarning(HTTPWarning):
-    """Raised when a user has an HTTPS proxy without enabling HTTPS proxies."""
-
-    pass
-
-
 log = logging.getLogger(__name__)
 
 SSL_KEYWORDS = (
@@ -68,6 +59,7 @@ _key_fields = (
     "key_headers",  # dict
     "key__proxy",  # parsed proxy url
     "key__proxy_headers",  # dict
+    "key__proxy_config",  # class
     "key_socket_options",  # list of (level (int), optname (int), value (int or str)) tuples
     "key__socks_options",  # dict
     "key_assert_hostname",  # bool or string
@@ -79,6 +71,9 @@ _key_fields = (
 #: All custom key schemes should include the fields in this key at a minimum.
 PoolKey = collections.namedtuple("PoolKey", _key_fields)
 
+_proxy_config_fields = ("ssl_context", "use_forwarding_for_https")
+ProxyConfig = collections.namedtuple("ProxyConfig", _proxy_config_fields)
+
 
 def _default_key_normalizer(key_class, request_context):
     """
@@ -170,6 +165,7 @@ class PoolManager(RequestMethods):
     """
 
     proxy = None
+    proxy_config = None
 
     def __init__(self, num_pools=10, headers=None, **connection_pool_kw):
         RequestMethods.__init__(self, headers)
@@ -326,14 +322,32 @@
     def _proxy_requires_url_absolute_form(self, parsed_url):
         """
         Indicates if the proxy requires the complete destination URL in the
-        request.
-
-        Normally this is only needed when not using an HTTP CONNECT tunnel.
+        request.  Normally this is only needed when not using an HTTP CONNECT
+        tunnel.
         """
         if self.proxy is None:
             return False
 
-        return parsed_url.scheme == "http" or self.proxy.scheme == "https"
+        return not connection_requires_http_tunnel(
+            self.proxy, self.proxy_config, parsed_url.scheme
+        )
+
+    def _validate_proxy_scheme_url_selection(self, url_scheme):
+        """
+        Validates that were not attempting to do TLS in TLS connections on
+        Python2 or with unsupported SSL implementations.
+        """
+        if self.proxy is None or url_scheme != "https":
+            return
+
+        if self.proxy.scheme != "https":
+            return
+
+        if six.PY2 and not self.proxy_config.use_forwarding_for_https:
+            raise ProxySchemeUnsupported(
+                "Contacting HTTPS destinations through HTTPS proxies "
+                "'via CONNECT tunnels' is not supported in Python 2"
+            )
 
     def urlopen(self, method, url, redirect=True, **kw):
         """
@@ -345,6 +359,8 @@
         :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
         """
         u = parse_url(url)
+        self._validate_proxy_scheme_url_selection(u.scheme)
+
         conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)
 
         kw["assert_same_host"] = False
@@ -415,11 +431,18 @@ class ProxyManager(PoolManager):
         HTTPS/CONNECT case they are sent only once. Could be used for proxy
         authentication.
 
-    :param _allow_https_proxy_to_see_traffic:
-        Allows forwarding of HTTPS requests to HTTPS proxies. The proxy will
-        have visibility of all the traffic sent. ONLY USE IF YOU KNOW WHAT
-        YOU'RE DOING. This flag might be removed at any time in any future
-        update.
+    :param proxy_ssl_context:
+        The proxy SSL context is used to establish the TLS connection to the
+        proxy when using HTTPS proxies.
+
+    :param use_forwarding_for_https:
+        (Defaults to False) If set to True will forward requests to the HTTPS
+        proxy to be made on behalf of the client instead of creating a TLS
+        tunnel via the CONNECT method. **Enabling this flag means that request
+        and response headers and content will be visible from the HTTPS proxy**
+        whereas tunneling keeps request and response headers and content
+        private.  IP address, target hostname, SNI, and port are always visible
+        to an HTTPS proxy even when this flag is disabled.
 
     Example:
         >>> proxy = urllib3.ProxyManager('http://localhost:3128/')
@@ -440,7 +463,8 @@
         num_pools=10,
         headers=None,
         proxy_headers=None,
-        _allow_https_proxy_to_see_traffic=False,
+        proxy_ssl_context=None,
+        use_forwarding_for_https=False,
         **connection_pool_kw
     ):
 
@@ -461,11 +485,12 @@
         self.proxy = proxy
         self.proxy_headers = proxy_headers or {}
+        self.proxy_ssl_context = proxy_ssl_context
+        self.proxy_config = ProxyConfig(proxy_ssl_context, use_forwarding_for_https)
 
         connection_pool_kw["_proxy"] = self.proxy
         connection_pool_kw["_proxy_headers"] = self.proxy_headers
-
-        self.allow_insecure_proxy = _allow_https_proxy_to_see_traffic
+        connection_pool_kw["_proxy_config"] = self.proxy_config
 
         super(ProxyManager, self).__init__(num_pools, headers, **connection_pool_kw)
@@ -494,35 +519,13 @@
             headers_.update(headers)
         return headers_
 
-    def _validate_proxy_scheme_url_selection(self, url_scheme):
-        if (
-            url_scheme == "https"
-            and self.proxy.scheme == "https"
-            and not self.allow_insecure_proxy
-        ):
-            warnings.warn(
-                "Your proxy configuration specified an HTTPS scheme for the proxy. "
-                "Are you sure you want to use HTTPS to contact the proxy? "
-                "This most likely indicates an error in your configuration."
-                "If you are sure you want use HTTPS to contact the proxy, enable "
-                "the _allow_https_proxy_to_see_traffic.",
-                InvalidProxyConfigurationWarning,
-            )
-
-            raise ProxySchemeUnsupported(
-                "Contacting HTTPS destinations through HTTPS proxies is not supported."
-            )
-
     def urlopen(self, method, url, redirect=True, **kw):
         "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute."
         u = parse_url(url)
-        self._validate_proxy_scheme_url_selection(u.scheme)
-
-        if u.scheme == "http" or self.proxy.scheme == "https":
+        if not connection_requires_http_tunnel(self.proxy, self.proxy_config, u.scheme):
             # For connections using HTTP CONNECT, httplib sets the necessary
-            # headers on the CONNECT to the proxy. For HTTP or when talking
-            # HTTPS to the proxy, we'll definitely need to set 'Host' at the
-            # very least.
+            # headers on the CONNECT to the proxy. If we're not using CONNECT,
+            # we'll definitely need to set 'Host' at the very least.
            headers = kw.get("headers", self.headers)
             kw["headers"] = self._set_proxy_headers(url, headers)
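A sketch of the two proxy modes the new parameters expose (assuming the vendored urllib3 is importable as urllib3; the proxy and destination URLs are placeholders):

    import ssl
    import urllib3

    # Default: HTTPS destinations are reached through a CONNECT tunnel, so the
    # proxy never sees request/response headers or content.
    tunneling_proxy = urllib3.ProxyManager("https://proxy.example:3128/")

    # Opt-in forwarding: the HTTPS proxy performs the request on the client's
    # behalf and can see headers and content (also the only HTTPS-over-HTTPS
    # mode available on Python 2, per _validate_proxy_scheme_url_selection).
    forwarding_proxy = urllib3.ProxyManager(
        "https://proxy.example:3128/",
        proxy_ssl_context=ssl.create_default_context(),
        use_forwarding_for_https=True,
    )
    r = forwarding_proxy.request("GET", "https://example.org/")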

lib/urllib3/request.py (1)

@@ -3,7 +3,6 @@ from __future__ import absolute_import
 from .filepost import encode_multipart_formdata
 from .packages.six.moves.urllib.parse import urlencode
 
-
 __all__ = ["RequestMethods"]

lib/urllib3/response.py (27)

@@ -1,10 +1,11 @@
 from __future__ import absolute_import
-from contextlib import contextmanager
-import zlib
+
 import io
 import logging
-from socket import timeout as SocketTimeout
+import zlib
+from contextlib import contextmanager
 from socket import error as SocketError
+from socket import timeout as SocketTimeout
 
 try:
     import brotli
@@ -12,20 +13,20 @@ except ImportError:
     brotli = None
 
 from ._collections import HTTPHeaderDict
+from .connection import BaseSSLError, HTTPException
 from .exceptions import (
     BodyNotHttplibCompatible,
-    ProtocolError,
     DecodeError,
-    ReadTimeoutError,
-    ResponseNotChunked,
+    HTTPError,
     IncompleteRead,
     InvalidChunkLength,
     InvalidHeader,
-    HTTPError,
+    ProtocolError,
+    ReadTimeoutError,
+    ResponseNotChunked,
     SSLError,
 )
-from .packages.six import string_types as basestring, PY3
-from .connection import HTTPException, BaseSSLError
+from .packages import six
 from .util.response import is_fp_closed, is_response_to_head
 
 log = logging.getLogger(__name__)
@@ -233,7 +234,7 @@ class HTTPResponse(io.IOBase):
         self.msg = msg
         self._request_url = request_url
 
-        if body and isinstance(body, (basestring, bytes)):
+        if body and isinstance(body, (six.string_types, bytes)):
             self._body = body
 
         self._pool = pool
@@ -589,11 +590,11 @@
         headers = r.msg
 
         if not isinstance(headers, HTTPHeaderDict):
-            if PY3:
-                headers = HTTPHeaderDict(headers.items())
-            else:
+            if six.PY2:
                 # Python 2.7
                 headers = HTTPHeaderDict.from_httplib(headers)
+            else:
+                headers = HTTPHeaderDict(headers.items())
 
         # HTTPResponse objects in Python 3 don't have a .strict attribute
         strict = getattr(r, "strict", 0)

lib/urllib3/util/__init__.py (18)

@@ -2,24 +2,23 @@ from __future__ import absolute_import
 # For backwards compatibility, provide imports that used to be here.
 from .connection import is_connection_dropped
-from .request import make_headers, SUPPRESS_USER_AGENT
+from .request import SKIP_HEADER, SKIPPABLE_HEADERS, make_headers
 from .response import is_fp_closed
+from .retry import Retry
 from .ssl_ import (
-    SSLContext,
+    ALPN_PROTOCOLS,
     HAS_SNI,
     IS_PYOPENSSL,
     IS_SECURETRANSPORT,
+    PROTOCOL_TLS,
+    SSLContext,
     assert_fingerprint,
     resolve_cert_reqs,
    resolve_ssl_version,
     ssl_wrap_socket,
-    PROTOCOL_TLS,
-    ALPN_PROTOCOLS,
 )
-from .timeout import current_time, Timeout
-from .retry import Retry
-from .url import get_host, parse_url, split_first, Url
+from .timeout import Timeout, current_time
+from .url import Url, get_host, parse_url, split_first
 from .wait import wait_for_read, wait_for_write
 
 __all__ = (
@@ -45,5 +44,6 @@ __all__ = (
     "ssl_wrap_socket",
     "wait_for_read",
     "wait_for_write",
-    "SUPPRESS_USER_AGENT",
+    "SKIP_HEADER",
+    "SKIPPABLE_HEADERS",
 )

lib/urllib3/util/connection.py (14)

@@ -1,7 +1,12 @@
 from __future__ import absolute_import
 
 import socket
-from .wait import NoWayToWaitForSocketError, wait_for_read
+
+from urllib3.exceptions import LocationParseError
+
 from ..contrib import _appengine_environ
+from ..packages import six
+from .wait import NoWayToWaitForSocketError, wait_for_read
 
 
 def is_connection_dropped(conn):  # Platform-specific
@@ -58,6 +63,13 @@ def create_connection(
     # The original create_connection function always returns all records.
     family = allowed_gai_family()
 
+    try:
+        host.encode("idna")
+    except UnicodeError:
+        return six.raise_from(
+            LocationParseError(u"'%s', label empty or too long" % host), None
+        )
+
     for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):
         af, socktype, proto, canonname, sa = res
         sock = None

lib/urllib3/util/proxy.py (56, new file)

@@ -0,0 +1,56 @@
+from .ssl_ import create_urllib3_context, resolve_cert_reqs, resolve_ssl_version
+
+
+def connection_requires_http_tunnel(
+    proxy_url=None, proxy_config=None, destination_scheme=None
+):
+    """
+    Returns True if the connection requires an HTTP CONNECT through the proxy.
+
+    :param URL proxy_url:
+        URL of the proxy.
+    :param ProxyConfig proxy_config:
+        Proxy configuration from poolmanager.py
+    :param str destination_scheme:
+        The scheme of the destination. (i.e https, http, etc)
+    """
+    # If we're not using a proxy, no way to use a tunnel.
+    if proxy_url is None:
+        return False
+
+    # HTTP destinations never require tunneling, we always forward.
+    if destination_scheme == "http":
+        return False
+
+    # Support for forwarding with HTTPS proxies and HTTPS destinations.
+    if (
+        proxy_url.scheme == "https"
+        and proxy_config
+        and proxy_config.use_forwarding_for_https
+    ):
+        return False
+
+    # Otherwise always use a tunnel.
+    return True
+
+
+def create_proxy_ssl_context(
+    ssl_version, cert_reqs, ca_certs=None, ca_cert_dir=None, ca_cert_data=None
+):
+    """
+    Generates a default proxy ssl context if one hasn't been provided by the
+    user.
+    """
+    ssl_context = create_urllib3_context(
+        ssl_version=resolve_ssl_version(ssl_version),
+        cert_reqs=resolve_cert_reqs(cert_reqs),
+    )
+    if (
+        not ca_certs
+        and not ca_cert_dir
+        and not ca_cert_data
+        and hasattr(ssl_context, "load_default_certs")
+    ):
+        ssl_context.load_default_certs()
+
+    return ssl_context
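A quick illustration of the forwarding-versus-tunnel decision encoded above (a sketch; ProxyConfig and parse_url are the objects shown elsewhere in this commit, and the proxy URL is a placeholder):

    from urllib3.poolmanager import ProxyConfig
    from urllib3.util.proxy import connection_requires_http_tunnel
    from urllib3.util.url import parse_url

    proxy = parse_url("https://proxy.example:3128")
    cfg = ProxyConfig(ssl_context=None, use_forwarding_for_https=False)

    assert connection_requires_http_tunnel(proxy, cfg, "https")      # CONNECT tunnel
    assert not connection_requires_http_tunnel(proxy, cfg, "http")   # plain forwarding

    cfg = ProxyConfig(ssl_context=None, use_forwarding_for_https=True)
    assert not connection_requires_http_tunnel(proxy, cfg, "https")  # forwarding opt-in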

lib/urllib3/util/queue.py (1)

@@ -1,4 +1,5 @@
 import collections
+
 from ..packages import six
 from ..packages.six.moves import queue

lib/urllib3/util/request.py (14)

@@ -1,13 +1,17 @@
 from __future__ import absolute_import
+
 from base64 import b64encode
 
-from ..packages.six import b, integer_types
 from ..exceptions import UnrewindableBodyError
+from ..packages.six import b, integer_types
 
-# Use an invalid User-Agent to represent suppressing of default user agent.
-# See https://tools.ietf.org/html/rfc7231#section-5.5.3 and
-# https://tools.ietf.org/html/rfc7230#section-3.2.6
-SUPPRESS_USER_AGENT = "@@@INVALID_USER_AGENT@@@"
+# Pass as a value within ``headers`` to skip
+# emitting some HTTP headers that are added automatically.
+# The only headers that are supported are ``Accept-Encoding``,
+# ``Host``, and ``User-Agent``.
+SKIP_HEADER = "@@@SKIP_HEADER@@@"
+SKIPPABLE_HEADERS = frozenset(["accept-encoding", "host", "user-agent"])
 
 ACCEPT_ENCODING = "gzip,deflate"
 try:
     import brotli as _unused_module_brotli  # noqa: F401
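A short sketch of how the new sentinel is intended to be used (assuming the vendored urllib3 is importable as urllib3; the URL is a placeholder). Passing SKIP_HEADER as the value of one of the skippable headers stops urllib3 from emitting it at all:

    import urllib3
    from urllib3.util import SKIP_HEADER

    http = urllib3.PoolManager()
    # Send the request without any User-Agent header; Accept-Encoding and Host
    # can be suppressed the same way.
    r = http.request(
        "GET",
        "https://example.org/",
        headers={"User-Agent": SKIP_HEADER},
    )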

lib/urllib3/util/response.py (5)

@@ -1,8 +1,9 @@
 from __future__ import absolute_import
-from email.errors import StartBoundaryNotFoundDefect, MultipartInvariantViolationDefect
 
-from ..packages.six.moves import http_client as httplib
+from email.errors import MultipartInvariantViolationDefect, StartBoundaryNotFoundDefect
 
 from ..exceptions import HeaderParsingError
+from ..packages.six.moves import http_client as httplib
 
 
 def is_fp_closed(obj):

153
lib/urllib3/util/retry.py

@ -1,23 +1,24 @@
from __future__ import absolute_import from __future__ import absolute_import
import time
import email
import logging import logging
import re
import time
import warnings
from collections import namedtuple from collections import namedtuple
from itertools import takewhile from itertools import takewhile
import email
import re
from ..exceptions import ( from ..exceptions import (
ConnectTimeoutError, ConnectTimeoutError,
InvalidHeader,
MaxRetryError, MaxRetryError,
ProtocolError, ProtocolError,
ProxyError,
ReadTimeoutError, ReadTimeoutError,
ResponseError, ResponseError,
InvalidHeader,
ProxyError,
) )
from ..packages import six from ..packages import six
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
@ -27,6 +28,49 @@ RequestHistory = namedtuple(
) )
# TODO: In v2 we can remove this sentinel and metaclass with deprecated options.
_Default = object()
class _RetryMeta(type):
@property
def DEFAULT_METHOD_WHITELIST(cls):
warnings.warn(
"Using 'Retry.DEFAULT_METHOD_WHITELIST' is deprecated and "
"will be removed in v2.0. Use 'Retry.DEFAULT_METHODS_ALLOWED' instead",
DeprecationWarning,
)
return cls.DEFAULT_ALLOWED_METHODS
@DEFAULT_METHOD_WHITELIST.setter
def DEFAULT_METHOD_WHITELIST(cls, value):
warnings.warn(
"Using 'Retry.DEFAULT_METHOD_WHITELIST' is deprecated and "
"will be removed in v2.0. Use 'Retry.DEFAULT_ALLOWED_METHODS' instead",
DeprecationWarning,
)
cls.DEFAULT_ALLOWED_METHODS = value
@property
def DEFAULT_REDIRECT_HEADERS_BLACKLIST(cls):
warnings.warn(
"Using 'Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST' is deprecated and "
"will be removed in v2.0. Use 'Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT' instead",
DeprecationWarning,
)
return cls.DEFAULT_REMOVE_HEADERS_ON_REDIRECT
@DEFAULT_REDIRECT_HEADERS_BLACKLIST.setter
def DEFAULT_REDIRECT_HEADERS_BLACKLIST(cls, value):
warnings.warn(
"Using 'Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST' is deprecated and "
"will be removed in v2.0. Use 'Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT' instead",
DeprecationWarning,
)
cls.DEFAULT_REMOVE_HEADERS_ON_REDIRECT = value
@six.add_metaclass(_RetryMeta)
class Retry(object): class Retry(object):
"""Retry configuration. """Retry configuration.
@ -107,18 +151,23 @@ class Retry(object):
If ``total`` is not set, it's a good idea to set this to 0 to account If ``total`` is not set, it's a good idea to set this to 0 to account
for unexpected edge cases and avoid infinite retry loops. for unexpected edge cases and avoid infinite retry loops.
:param iterable method_whitelist: :param iterable allowed_methods:
Set of uppercased HTTP method verbs that we should retry on. Set of uppercased HTTP method verbs that we should retry on.
By default, we only retry on methods which are considered to be By default, we only retry on methods which are considered to be
idempotent (multiple requests with the same parameters end with the idempotent (multiple requests with the same parameters end with the
same state). See :attr:`Retry.DEFAULT_METHOD_WHITELIST`. same state). See :attr:`Retry.DEFAULT_ALLOWED_METHODS`.
Set to a ``False`` value to retry on any verb. Set to a ``False`` value to retry on any verb.
.. warning::
Previously this parameter was named ``method_whitelist``, that
usage is deprecated in v1.26.0 and will be removed in v2.0.
:param iterable status_forcelist: :param iterable status_forcelist:
A set of integer HTTP status codes that we should force a retry on. A set of integer HTTP status codes that we should force a retry on.
A retry is initiated if the request method is in ``method_whitelist`` A retry is initiated if the request method is in ``allowed_methods``
and the response status code is in ``status_forcelist``. and the response status code is in ``status_forcelist``.
By default, this is disabled with ``None``. By default, this is disabled with ``None``.
@ -159,13 +208,16 @@ class Retry(object):
request. request.
""" """
DEFAULT_METHOD_WHITELIST = frozenset( #: Default methods to be used for ``allowed_methods``
DEFAULT_ALLOWED_METHODS = frozenset(
["HEAD", "GET", "PUT", "DELETE", "OPTIONS", "TRACE"] ["HEAD", "GET", "PUT", "DELETE", "OPTIONS", "TRACE"]
) )
#: Default status codes to be used for ``status_forcelist``
RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503]) RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503])
DEFAULT_REDIRECT_HEADERS_BLACKLIST = frozenset(["Authorization"]) #: Default headers to be used for ``remove_headers_on_redirect``
DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Authorization"])
#: Maximum backoff time. #: Maximum backoff time.
BACKOFF_MAX = 120 BACKOFF_MAX = 120
@ -178,16 +230,36 @@ class Retry(object):
redirect=None, redirect=None,
status=None, status=None,
other=None, other=None,
method_whitelist=DEFAULT_METHOD_WHITELIST, allowed_methods=_Default,
status_forcelist=None, status_forcelist=None,
backoff_factor=0, backoff_factor=0,
raise_on_redirect=True, raise_on_redirect=True,
raise_on_status=True, raise_on_status=True,
history=None, history=None,
respect_retry_after_header=True, respect_retry_after_header=True,
remove_headers_on_redirect=DEFAULT_REDIRECT_HEADERS_BLACKLIST, remove_headers_on_redirect=_Default,
# TODO: Deprecated, remove in v2.0
method_whitelist=_Default,
): ):
if method_whitelist is not _Default:
if allowed_methods is not _Default:
raise ValueError(
"Using both 'allowed_methods' and "
"'method_whitelist' together is not allowed. "
"Instead only use 'allowed_methods'"
)
warnings.warn(
"Using 'method_whitelist' with Retry is deprecated and "
"will be removed in v2.0. Use 'allowed_methods' instead",
DeprecationWarning,
)
allowed_methods = method_whitelist
if allowed_methods is _Default:
allowed_methods = self.DEFAULT_ALLOWED_METHODS
if remove_headers_on_redirect is _Default:
remove_headers_on_redirect = self.DEFAULT_REMOVE_HEADERS_ON_REDIRECT
self.total = total self.total = total
self.connect = connect self.connect = connect
self.read = read self.read = read
@ -200,7 +272,7 @@ class Retry(object):
self.redirect = redirect self.redirect = redirect
self.status_forcelist = status_forcelist or set() self.status_forcelist = status_forcelist or set()
self.method_whitelist = method_whitelist self.allowed_methods = allowed_methods
self.backoff_factor = backoff_factor self.backoff_factor = backoff_factor
self.raise_on_redirect = raise_on_redirect self.raise_on_redirect = raise_on_redirect
self.raise_on_status = raise_on_status self.raise_on_status = raise_on_status
@ -218,7 +290,6 @@ class Retry(object):
redirect=self.redirect, redirect=self.redirect,
status=self.status, status=self.status,
other=self.other, other=self.other,
method_whitelist=self.method_whitelist,
status_forcelist=self.status_forcelist, status_forcelist=self.status_forcelist,
backoff_factor=self.backoff_factor, backoff_factor=self.backoff_factor,
raise_on_redirect=self.raise_on_redirect, raise_on_redirect=self.raise_on_redirect,
@ -227,6 +298,23 @@ class Retry(object):
remove_headers_on_redirect=self.remove_headers_on_redirect, remove_headers_on_redirect=self.remove_headers_on_redirect,
respect_retry_after_header=self.respect_retry_after_header, respect_retry_after_header=self.respect_retry_after_header,
) )
# TODO: If already given in **kw we use what's given to us
# If not given we need to figure out what to pass. We decide
# based on whether our class has the 'method_whitelist' property
# and if so we pass the deprecated 'method_whitelist' otherwise
# we use 'allowed_methods'. Remove in v2.0
if "method_whitelist" not in kw and "allowed_methods" not in kw:
if "method_whitelist" in self.__dict__:
warnings.warn(
"Using 'method_whitelist' with Retry is deprecated and "
"will be removed in v2.0. Use 'allowed_methods' instead",
DeprecationWarning,
)
params["method_whitelist"] = self.allowed_methods
else:
params["allowed_methods"] = self.allowed_methods
params.update(kw) params.update(kw)
return type(self)(**params) return type(self)(**params)
@ -340,15 +428,26 @@ class Retry(object):
def _is_method_retryable(self, method): def _is_method_retryable(self, method):
"""Checks if a given HTTP method should be retried upon, depending if """Checks if a given HTTP method should be retried upon, depending if
it is included on the method whitelist. it is included in the allowed_methods
""" """
if self.method_whitelist and method.upper() not in self.method_whitelist: # TODO: For now favor if the Retry implementation sets its own method_whitelist
return False # property outside of our constructor to avoid breaking custom implementations.
if "method_whitelist" in self.__dict__:
warnings.warn(
"Using 'method_whitelist' with Retry is deprecated and "
"will be removed in v2.0. Use 'allowed_methods' instead",
DeprecationWarning,
)
allowed_methods = self.method_whitelist
else:
allowed_methods = self.allowed_methods
if allowed_methods and method.upper() not in allowed_methods:
return False
return True return True
def is_retry(self, method, status_code, has_retry_after=False): def is_retry(self, method, status_code, has_retry_after=False):
"""Is this method/status code retryable? (Based on whitelists and control """Is this method/status code retryable? (Based on allowlists and control
variables such as the number of total retries to allow, whether to variables such as the number of total retries to allow, whether to
respect the Retry-After header, whether this header is present, and respect the Retry-After header, whether this header is present, and
whether the returned status code is on the list of status codes to whether the returned status code is on the list of status codes to
@ -448,7 +547,7 @@ class Retry(object):
else: else:
# Incrementing because of a server error like a 500 in # Incrementing because of a server error like a 500 in
# status_forcelist and a the given method is in the whitelist # status_forcelist and the given method is in the allowed_methods
cause = ResponseError.GENERIC_ERROR cause = ResponseError.GENERIC_ERROR
if response and response.status: if response and response.status:
if status_count is not None: if status_count is not None:
@ -483,6 +582,20 @@ class Retry(object):
"read={self.read}, redirect={self.redirect}, status={self.status})" "read={self.read}, redirect={self.redirect}, status={self.status})"
).format(cls=type(self), self=self) ).format(cls=type(self), self=self)
def __getattr__(self, item):
if item == "method_whitelist":
# TODO: Remove this deprecated alias in v2.0
warnings.warn(
"Using 'method_whitelist' with Retry is deprecated and "
"will be removed in v2.0. Use 'allowed_methods' instead",
DeprecationWarning,
)
return self.allowed_methods
try:
return getattr(super(Retry, self), item)
except AttributeError:
return getattr(Retry, item)
# For backwards compatibility (equivalent to pre-v1.9): # For backwards compatibility (equivalent to pre-v1.9):
Retry.DEFAULT = Retry(3) Retry.DEFAULT = Retry(3)
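The __getattr__ shim above means existing code that merely reads the old attribute keeps working. A sketch, under the same assumption about the installed version:

    from urllib3.util.retry import Retry

    retry = Retry(allowed_methods=frozenset(["GET", "HEAD"]))

    # Normal attribute lookup fails (the constructor stores allowed_methods),
    # so __getattr__ answers for 'method_whitelist', warning as it does so.
    assert retry.method_whitelist == frozenset(["GET", "HEAD"])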

66
lib/urllib3/util/ssl_.py

@ -1,18 +1,23 @@
from __future__ import absolute_import from __future__ import absolute_import
import warnings
import hmac import hmac
import os import os
import sys import sys
import warnings
from binascii import hexlify, unhexlify from binascii import hexlify, unhexlify
from hashlib import md5, sha1, sha256 from hashlib import md5, sha1, sha256
from .url import IPV4_RE, BRACELESS_IPV6_ADDRZ_RE from ..exceptions import (
from ..exceptions import SSLError, InsecurePlatformWarning, SNIMissingWarning InsecurePlatformWarning,
ProxySchemeUnsupported,
SNIMissingWarning,
SSLError,
)
from ..packages import six from ..packages import six
from .url import BRACELESS_IPV6_ADDRZ_RE, IPV4_RE
SSLContext = None SSLContext = None
SSLTransport = None
HAS_SNI = False HAS_SNI = False
IS_PYOPENSSL = False IS_PYOPENSSL = False
IS_SECURETRANSPORT = False IS_SECURETRANSPORT = False
@ -39,8 +44,10 @@ _const_compare_digest = getattr(hmac, "compare_digest", _const_compare_digest_ba
try: # Test for SSL features try: # Test for SSL features
import ssl import ssl
from ssl import wrap_socket, CERT_REQUIRED
from ssl import HAS_SNI # Has SNI? from ssl import HAS_SNI # Has SNI?
from ssl import CERT_REQUIRED, wrap_socket
from .ssltransport import SSLTransport
except ImportError: except ImportError:
pass pass
@ -58,12 +65,18 @@ except ImportError:
try: try:
from ssl import OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_COMPRESSION from ssl import OP_NO_COMPRESSION, OP_NO_SSLv2, OP_NO_SSLv3
except ImportError: except ImportError:
OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000 OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000
OP_NO_COMPRESSION = 0x20000 OP_NO_COMPRESSION = 0x20000
try: # OP_NO_TICKET was added in Python 3.6
from ssl import OP_NO_TICKET
except ImportError:
OP_NO_TICKET = 0x4000
# A secure default. # A secure default.
# Sources for more information on TLS ciphers: # Sources for more information on TLS ciphers:
# #
@ -250,7 +263,7 @@ def create_urllib3_context(
``ssl.CERT_REQUIRED``. ``ssl.CERT_REQUIRED``.
:param options: :param options:
Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``, Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``,
``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``. ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``, and ``ssl.OP_NO_TICKET``.
:param ciphers: :param ciphers:
Which cipher suites to allow the server to select. Which cipher suites to allow the server to select.
:returns: :returns:
@ -273,6 +286,11 @@ def create_urllib3_context(
# Disable compression to prevent CRIME attacks for OpenSSL 1.0+ # Disable compression to prevent CRIME attacks for OpenSSL 1.0+
# (issue #309) # (issue #309)
options |= OP_NO_COMPRESSION options |= OP_NO_COMPRESSION
# TLSv1.2 only. Unless set explicitly, do not request tickets.
# This may save some bandwidth on wire, and although the ticket is encrypted,
# there is a risk associated with it being on wire,
# if the server is not rotating its ticketing keys properly.
options |= OP_NO_TICKET
context.options |= options context.options |= options
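The effect of the new default is easy to check from calling code; a sketch assuming Python 3.6+ where ssl.OP_NO_TICKET exists:

    import ssl
    from urllib3.util.ssl_ import create_urllib3_context

    # With no explicit options, the context now carries OP_NO_TICKET alongside
    # the long-standing OP_NO_SSLv2 / OP_NO_SSLv3 / OP_NO_COMPRESSION flags.
    ctx = create_urllib3_context()
    assert ctx.options & ssl.OP_NO_TICKET

Note that callers who pass options= explicitly bypass this default set, so they would need to OR in ssl.OP_NO_TICKET themselves if they still want tickets suppressed.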
@ -296,9 +314,11 @@ def create_urllib3_context(
context.check_hostname = False context.check_hostname = False
# Enable logging of TLS session keys via defacto standard environment variable # Enable logging of TLS session keys via defacto standard environment variable
# 'SSLKEYLOGFILE', if the feature is available (Python 3.8+). # 'SSLKEYLOGFILE', if the feature is available (Python 3.8+). Skip empty values.
if hasattr(context, "keylog_filename"): if hasattr(context, "keylog_filename"):
context.keylog_filename = os.environ.get("SSLKEYLOGFILE") sslkeylogfile = os.environ.get("SSLKEYLOGFILE")
if sslkeylogfile:
context.keylog_filename = sslkeylogfile
return context return context
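From the caller's side the hook looks like this; a sketch assuming Python 3.8+, where SSLContext.keylog_filename exists (older interpreters skip the branch entirely), and with a placeholder log path:

    import os
    from urllib3.util.ssl_ import create_urllib3_context

    # Point SSLKEYLOGFILE at a writable path before contexts are created;
    # TLS session secrets are then appended there (useful for Wireshark).
    os.environ["SSLKEYLOGFILE"] = "/tmp/tls-keys.log"  # placeholder path
    print(create_urllib3_context().keylog_filename)

    # An empty value is now skipped rather than assigned verbatim.
    os.environ["SSLKEYLOGFILE"] = ""
    print(create_urllib3_context().keylog_filename)  # None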
@ -316,6 +336,7 @@ def ssl_wrap_socket(
ca_cert_dir=None, ca_cert_dir=None,
key_password=None, key_password=None,
ca_cert_data=None, ca_cert_data=None,
tls_in_tls=False,
): ):
""" """
All arguments except for server_hostname, ssl_context, and ca_cert_dir have All arguments except for server_hostname, ssl_context, and ca_cert_dir have
@ -337,6 +358,8 @@ def ssl_wrap_socket(
:param ca_cert_data: :param ca_cert_data:
Optional string containing CA certificates in PEM format suitable for Optional string containing CA certificates in PEM format suitable for
passing as the cadata parameter to SSLContext.load_verify_locations() passing as the cadata parameter to SSLContext.load_verify_locations()
:param tls_in_tls:
Use SSLTransport to wrap the existing socket.
""" """
context = ssl_context context = ssl_context
if context is None: if context is None:
@ -394,9 +417,11 @@ def ssl_wrap_socket(
) )
if send_sni: if send_sni:
ssl_sock = context.wrap_socket(sock, server_hostname=server_hostname) ssl_sock = _ssl_wrap_socket_impl(
sock, context, tls_in_tls, server_hostname=server_hostname
)
else: else:
ssl_sock = context.wrap_socket(sock) ssl_sock = _ssl_wrap_socket_impl(sock, context, tls_in_tls)
return ssl_sock return ssl_sock
@ -422,3 +447,20 @@ def _is_key_file_encrypted(key_file):
return True return True
return False return False
def _ssl_wrap_socket_impl(sock, ssl_context, tls_in_tls, server_hostname=None):
if tls_in_tls:
if not SSLTransport:
# Import error, ssl is not available.
raise ProxySchemeUnsupported(
"TLS in TLS requires support for the 'ssl' module"
)
SSLTransport._validate_ssl_context_for_tls_in_tls(ssl_context)
return SSLTransport(sock, ssl_context, server_hostname)
if server_hostname:
return ssl_context.wrap_socket(sock, server_hostname=server_hostname)
else:
return ssl_context.wrap_socket(sock)
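How the new tls_in_tls switch is meant to be driven, in rough outline; a hedged sketch rather than code from the diff. The endpoint and context below are placeholders, and the commented lines stand in for an already-established TLS tunnel to an HTTPS proxy:

    import socket
    from urllib3.util.ssl_ import create_urllib3_context, ssl_wrap_socket

    ctx = create_urllib3_context()
    ctx.load_default_certs()
    raw = socket.create_connection(("example.org", 443))  # placeholder endpoint

    # Ordinary case: a single layer of TLS, same behaviour as before this change.
    tls_sock = ssl_wrap_socket(raw, ssl_context=ctx, server_hostname="example.org")

    # TLS-in-TLS case (HTTPS origin reached through an HTTPS proxy): the
    # already-encrypted proxy socket would be wrapped again via SSLTransport.
    # inner = ssl_wrap_socket(proxy_tls_sock, ssl_context=ctx,
    #                         server_hostname="example.org", tls_in_tls=True)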

29
lib/urllib3/contrib/ssl.py → lib/urllib3/util/ssltransport.py

@ -1,6 +1,9 @@
import ssl
import socket
import io import io
import socket
import ssl
from urllib3.exceptions import ProxySchemeUnsupported
from urllib3.packages import six
SSL_BLOCKSIZE = 16384 SSL_BLOCKSIZE = 16384
@ -16,6 +19,28 @@ class SSLTransport:
The class supports most of the socket API operations. The class supports most of the socket API operations.
""" """
@staticmethod
def _validate_ssl_context_for_tls_in_tls(ssl_context):
"""
Raises a ProxySchemeUnsupported if the provided ssl_context can't be used
for TLS in TLS.
The only requirement is that the ssl_context provides the 'wrap_bio'
methods.
"""
if not hasattr(ssl_context, "wrap_bio"):
if six.PY2:
raise ProxySchemeUnsupported(
"TLS in TLS requires SSLContext.wrap_bio() which isn't "
"supported on Python 2"
)
else:
raise ProxySchemeUnsupported(
"TLS in TLS requires SSLContext.wrap_bio() which isn't "
"available on non-native SSLContext"
)
def __init__( def __init__(
self, socket, ssl_context, suppress_ragged_eofs=True, server_hostname=None self, socket, ssl_context, suppress_ragged_eofs=True, server_hostname=None
): ):
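The validation helper added above can be exercised on its own; a sketch assuming a Python 3 interpreter whose stdlib SSLContext provides wrap_bio, with a deliberately fake context standing in for one that does not:

    import ssl
    from urllib3.exceptions import ProxySchemeUnsupported
    from urllib3.util.ssltransport import SSLTransport

    # Passes silently: the stdlib context exposes wrap_bio().
    SSLTransport._validate_ssl_context_for_tls_in_tls(ssl.create_default_context())

    class FakeContext(object):
        pass  # stand-in for a context without wrap_bio (e.g. pyOpenSSL's)

    try:
        SSLTransport._validate_ssl_context_for_tls_in_tls(FakeContext())
    except ProxySchemeUnsupported as exc:
        print("rejected:", exc)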

3
lib/urllib3/util/timeout.py

@ -1,9 +1,10 @@
from __future__ import absolute_import from __future__ import absolute_import
import time
# The default socket timeout, used by httplib to indicate that no timeout was # The default socket timeout, used by httplib to indicate that no timeout was
# specified by the user # specified by the user
from socket import _GLOBAL_DEFAULT_TIMEOUT from socket import _GLOBAL_DEFAULT_TIMEOUT
import time
from ..exceptions import TimeoutStateError from ..exceptions import TimeoutStateError

2
lib/urllib3/util/url.py

@ -1,11 +1,11 @@
from __future__ import absolute_import from __future__ import absolute_import
import re import re
from collections import namedtuple from collections import namedtuple
from ..exceptions import LocationParseError from ..exceptions import LocationParseError
from ..packages import six from ..packages import six
url_attrs = ["scheme", "auth", "host", "port", "path", "query", "fragment"] url_attrs = ["scheme", "auth", "host", "port", "path", "query", "fragment"]
# We only want to normalize urls with an HTTP(S) scheme. # We only want to normalize urls with an HTTP(S) scheme.

2
lib/urllib3/util/wait.py

@ -1,7 +1,7 @@
import errno import errno
from functools import partial
import select import select
import sys import sys
from functools import partial
try: try:
from time import monotonic from time import monotonic
