
Update Requests library 2.19.1 (2c6a842) → 2.20.1 (57d7284).

pull/1200/head
JackDandy committed 7 years ago
commit 5596e90bc9
  1.   1  CHANGES.md
  2.  15  lib/requests/__init__.py
  3.   4  lib/requests/__version__.py
  4.  16  lib/requests/adapters.py
  5.   4  lib/requests/auth.py
  6.   3  lib/requests/compat.py
  7.  29  lib/requests/cookies.py
  8.   3  lib/requests/help.py
  9.   4  lib/requests/hooks.py
 10.   7  lib/requests/models.py
 11.  15  lib/requests/sessions.py
 12.  15  lib/requests/utils.py

1  CHANGES.md

@@ -3,6 +3,7 @@
 * Update CacheControl library 0.12.5 (cd91309) to 0.12.5 (0fedbba)
 * Update Certifi 2018.08.24 (8be9f89) to 2018.10.15 (a462d21)
 * Update dateutil 2.7.2 (49690ee) to 2.7.5 (e954819)
+* Update Requests library 2.19.1 (2c6a842) to 2.20.1 (57d7284)
 [develop changelog]

15  lib/requests/__init__.py

@@ -57,10 +57,10 @@ def check_compatibility(urllib3_version, chardet_version):
     # Check urllib3 for compatibility.
     major, minor, patch = urllib3_version  # noqa: F811
     major, minor, patch = int(major), int(minor), int(patch)
-    # urllib3 >= 1.21.1, <= 1.23
+    # urllib3 >= 1.21.1, <= 1.24
     assert major == 1
     assert minor >= 21
-    assert minor <= 23
+    assert minor <= 24

     # Check chardet for compatibility.
     major, minor, patch = chardet_version.split('.')[:3]

@@ -79,14 +79,14 @@ def _check_cryptography(cryptography_version):
         return

     if cryptography_version < [1, 3, 4]:
-        warning = 'Old version of cryptography ({0}) may cause slowdown.'.format(cryptography_version)
+        warning = 'Old version of cryptography ({}) may cause slowdown.'.format(cryptography_version)
         warnings.warn(warning, RequestsDependencyWarning)

 # Check imported dependencies for compatibility.
 try:
     check_compatibility(urllib3.__version__, chardet.__version__)
 except (AssertionError, ValueError):
-    warnings.warn("urllib3 ({0}) or chardet ({1}) doesn't match a supported "
+    warnings.warn("urllib3 ({}) or chardet ({}) doesn't match a supported "
                   "version!".format(urllib3.__version__, chardet.__version__),
                   RequestsDependencyWarning)

@@ -123,12 +123,7 @@ from .exceptions import (
 # Set default logging handler to avoid "No handler found" warnings.
 import logging
-try:  # Python 2.7+
-    from logging import NullHandler
-except ImportError:
-    class NullHandler(logging.Handler):
-        def emit(self, record):
-            pass
+from logging import NullHandler

 logging.getLogger(__name__).addHandler(NullHandler())

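Note: with the widened bound, urllib3 1.24.x now passes the import-time compatibility gate instead of triggering RequestsDependencyWarning. A minimal standalone sketch of how that gate behaves (an approximation of the assertions above, not the library code itself):

    import warnings

    def urllib3_in_range(version):
        # Approximation of the gate above: accept urllib3 >= 1.21, <= 1.24.
        parts = version.split('.')
        if len(parts) == 2:      # urllib3 sometimes reports a two-part version
            parts.append('0')
        major, minor, patch = (int(p) for p in parts[:3])
        return major == 1 and 21 <= minor <= 24

    for candidate in ('1.23.0', '1.24.1', '1.25.0'):
        if not urllib3_in_range(candidate):
            warnings.warn('urllib3 {} falls outside the supported range'.format(candidate))
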
4  lib/requests/__version__.py

@@ -5,8 +5,8 @@
 __title__ = 'requests'
 __description__ = 'Python HTTP for Humans.'
 __url__ = 'http://python-requests.org'
-__version__ = '2.19.1'
-__build__ = 0x021901
+__version__ = '2.20.1'
+__build__ = 0x022001
 __author__ = 'Kenneth Reitz'
 __author_email__ = 'me@kennethreitz.org'
 __license__ = 'Apache 2.0'

16  lib/requests/adapters.py

@@ -129,8 +129,7 @@ class HTTPAdapter(BaseAdapter):
         self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)

     def __getstate__(self):
-        return dict((attr, getattr(self, attr, None)) for attr in
-                    self.__attrs__)
+        return {attr: getattr(self, attr, None) for attr in self.__attrs__}

     def __setstate__(self, state):
         # Can't handle by adding 'proxy_manager' to self.__attrs__ because

@@ -226,7 +225,7 @@ class HTTPAdapter(BaseAdapter):
             if not cert_loc or not os.path.exists(cert_loc):
                 raise IOError("Could not find a suitable TLS CA certificate bundle, "
-                              "invalid path: {0}".format(cert_loc))
+                              "invalid path: {}".format(cert_loc))

             conn.cert_reqs = 'CERT_REQUIRED'

@@ -248,10 +247,10 @@ class HTTPAdapter(BaseAdapter):
                 conn.key_file = None
             if conn.cert_file and not os.path.exists(conn.cert_file):
                 raise IOError("Could not find the TLS certificate file, "
-                              "invalid path: {0}".format(conn.cert_file))
+                              "invalid path: {}".format(conn.cert_file))
             if conn.key_file and not os.path.exists(conn.key_file):
                 raise IOError("Could not find the TLS key file, "
-                              "invalid path: {0}".format(conn.key_file))
+                              "invalid path: {}".format(conn.key_file))

     def build_response(self, req, resp):
         """Builds a :class:`Response <requests.Response>` object from a urllib3

@@ -426,7 +425,7 @@ class HTTPAdapter(BaseAdapter):
                 timeout = TimeoutSauce(connect=connect, read=read)
             except ValueError as e:
                 # this may raise a string formatting error.
-                err = ("Invalid timeout {0}. Pass a (connect, read) "
+                err = ("Invalid timeout {}. Pass a (connect, read) "
                        "timeout tuple, or a single float to set "
                        "both timeouts to the same value".format(timeout))
                 raise ValueError(err)

@@ -476,11 +475,10 @@ class HTTPAdapter(BaseAdapter):
                     # Receive the response from the server
                     try:
-                        # For Python 2.7+ versions, use buffering of HTTP
-                        # responses
+                        # For Python 2.7, use buffering of HTTP responses
                         r = low_conn.getresponse(buffering=True)
                     except TypeError:
-                        # For compatibility with Python 2.6 versions and back
+                        # For compatibility with Python 3.3+
                         r = low_conn.getresponse()

                     resp = HTTPResponse.from_httplib(

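Apart from the dict comprehension and format-string cleanup, cert_verify behaves as before: a verify (or cert) path that does not exist still raises IOError with the "invalid path" message before any request is sent. A hedged usage sketch, where the bundle path is a placeholder assumed not to exist:

    import requests

    session = requests.Session()
    try:
        # '/tmp/does-not-exist.pem' is a hypothetical, missing CA bundle path.
        session.get('https://example.com', verify='/tmp/does-not-exist.pem')
    except IOError as exc:
        # HTTPAdapter.cert_verify raises before any network traffic happens.
        print(exc)  # "Could not find a suitable TLS CA certificate bundle, invalid path: ..."
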
4  lib/requests/auth.py

@@ -38,7 +38,7 @@ def _basic_auth_str(username, password):
     if not isinstance(username, basestring):
         warnings.warn(
             "Non-string usernames will no longer be supported in Requests "
-            "3.0.0. Please convert the object you've passed in ({0!r}) to "
+            "3.0.0. Please convert the object you've passed in ({!r}) to "
             "a string or bytes object in the near future to avoid "
             "problems.".format(username),
             category=DeprecationWarning,

@@ -48,7 +48,7 @@ def _basic_auth_str(username, password):
     if not isinstance(password, basestring):
         warnings.warn(
             "Non-string passwords will no longer be supported in Requests "
-            "3.0.0. Please convert the object you've passed in ({0!r}) to "
+            "3.0.0. Please convert the object you've passed in ({!r}) to "
             "a string or bytes object in the near future to avoid "
             "problems.".format(password),
             category=DeprecationWarning,

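The only change here is the format-string style; the DeprecationWarning for non-string credentials introduced in 2.19 is still emitted. Passing text (or bytes) credentials avoids it. For example (URL is just an illustration):

    import requests
    from requests.auth import HTTPBasicAuth

    # Strings (or bytes) keep _basic_auth_str quiet; passing e.g. an int for
    # the username would trigger the DeprecationWarning shown in the hunk above.
    auth = HTTPBasicAuth('user', 's3cr3t')
    response = requests.get('https://httpbin.org/basic-auth/user/s3cr3t', auth=auth)
    print(response.status_code)
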
3  lib/requests/compat.py

@@ -43,9 +43,8 @@ if is_py2:
     import cookielib
     from Cookie import Morsel
     from StringIO import StringIO
-    from collections import Callable, Mapping, MutableMapping
-    from urllib3.packages.ordered_dict import OrderedDict
+    from collections import Callable, Mapping, MutableMapping, OrderedDict

     builtin_str = str
     bytes = str

29  lib/requests/cookies.py

@@ -444,20 +444,21 @@ def create_cookie(name, value, **kwargs):
     By default, the pair of `name` and `value` will be set for the domain ''
     and sent on every request (this is sometimes called a "supercookie").
     """
-    result = dict(
-        version=0,
-        name=name,
-        value=value,
-        port=None,
-        domain='',
-        path='/',
-        secure=False,
-        expires=None,
-        discard=True,
-        comment=None,
-        comment_url=None,
-        rest={'HttpOnly': None},
-        rfc2109=False,)
+    result = {
+        'version': 0,
+        'name': name,
+        'value': value,
+        'port': None,
+        'domain': '',
+        'path': '/',
+        'secure': False,
+        'expires': None,
+        'discard': True,
+        'comment': None,
+        'comment_url': None,
+        'rest': {'HttpOnly': None},
+        'rfc2109': False,
+    }

     badargs = set(kwargs) - set(result)
     if badargs:

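The dict() call becomes a literal, but create_cookie still fills the same defaults and still rejects unknown keyword arguments. A small usage sketch (domain value is just an example):

    from requests.cookies import RequestsCookieJar, create_cookie

    jar = RequestsCookieJar()
    cookie = create_cookie('session', 'abc123', domain='example.com', secure=True)
    jar.set_cookie(cookie)
    print(cookie.path, cookie.secure)   # '/' True, defaults taken from the dict above

    try:
        create_cookie('broken', 'x', flavour='oops')   # not a recognised argument
    except TypeError as exc:
        print(exc)
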
3  lib/requests/help.py

@@ -89,8 +89,7 @@ def info():
         'version': getattr(idna, '__version__', ''),
     }

-    # OPENSSL_VERSION_NUMBER doesn't exist in the Python 2.6 ssl module.
-    system_ssl = getattr(ssl, 'OPENSSL_VERSION_NUMBER', None)
+    system_ssl = ssl.OPENSSL_VERSION_NUMBER
     system_ssl_info = {
         'version': '%x' % system_ssl if system_ssl is not None else ''
     }

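With Python 2.6 out of scope, ssl.OPENSSL_VERSION_NUMBER can be read directly. The value feeds the diagnostic report from requests.help, which is a quick way to confirm which Requests/urllib3/OpenSSL combination is actually loaded:

    import json
    import requests.help

    # info() gathers implementation, requests, urllib3, chardet and OpenSSL details;
    # 'system_ssl' now comes straight from ssl.OPENSSL_VERSION_NUMBER.
    print(json.dumps(requests.help.info(), sort_keys=True, indent=2))
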
4  lib/requests/hooks.py

@@ -15,14 +15,14 @@ HOOKS = ['response']

 def default_hooks():
-    return dict((event, []) for event in HOOKS)
+    return {event: [] for event in HOOKS}

 # TODO: response is the only one

 def dispatch_hook(key, hooks, hook_data, **kwargs):
     """Dispatches a hook dictionary on a given piece of data."""
-    hooks = hooks or dict()
+    hooks = hooks or {}
     hooks = hooks.get(key)
     if hooks:
         if hasattr(hooks, '__call__'):

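default_hooks() and dispatch_hook() behave exactly as before; only the dict construction is modernised. For reference, the hook machinery is driven through the public hooks= argument (URL is just an example):

    import requests

    def log_url(response, *args, **kwargs):
        # A response hook receives the Response plus the keyword arguments of send().
        print('fetched', response.url, response.status_code)

    # dispatch_hook() calls every callable registered under the 'response' key.
    requests.get('https://httpbin.org/get', hooks={'response': log_url})
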
7  lib/requests/models.py

@@ -652,10 +652,7 @@ class Response(object):
         if not self._content_consumed:
             self.content

-        return dict(
-            (attr, getattr(self, attr, None))
-            for attr in self.__attrs__
-        )
+        return {attr: getattr(self, attr, None) for attr in self.__attrs__}

     def __setstate__(self, state):
         for name, value in state.items():

@@ -784,7 +781,7 @@ class Response(object):
         return chunks

-    def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None, delimiter=None):
+    def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None):
         """Iterates over the response data, one line at a time. When
         stream=True is set on the request, this avoids reading the
         content at once into memory for large responses.

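Besides the __getstate__ comprehension, the visible API tweak is the default of decode_unicode in iter_lines() changing from None to False, matching iter_content() (both values are falsy, so the yielded type does not change). Typical streaming usage is unaffected (URL is just an example):

    import requests

    # stream=True defers the body download; iter_lines() defaults to
    # decode_unicode=False, so it yields bytes unless told otherwise.
    with requests.get('https://httpbin.org/stream/3', stream=True) as response:
        for line in response.iter_lines(decode_unicode=False):
            if line:                     # skip keep-alive blank lines
                print(line[:60])
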
15  lib/requests/sessions.py

@@ -19,7 +19,7 @@ from .cookies import (
 from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT
 from .hooks import default_hooks, dispatch_hook
 from ._internal_utils import to_native_string
-from .utils import to_key_val_list, default_headers
+from .utils import to_key_val_list, default_headers, DEFAULT_PORTS
 from .exceptions import (
     TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError)

@@ -128,8 +128,17 @@ class SessionRedirectMixin(object):
         if (old_parsed.scheme == 'http' and old_parsed.port in (80, None)
                 and new_parsed.scheme == 'https' and new_parsed.port in (443, None)):
             return False

+        # Handle default port usage corresponding to scheme.
+        changed_port = old_parsed.port != new_parsed.port
+        changed_scheme = old_parsed.scheme != new_parsed.scheme
+        default_port = (DEFAULT_PORTS.get(old_parsed.scheme, None), None)
+        if (not changed_scheme and old_parsed.port in default_port
+                and new_parsed.port in default_port):
+            return False
+
         # Standard case: root URI must match
-        return old_parsed.port != new_parsed.port or old_parsed.scheme != new_parsed.scheme
+        return changed_port or changed_scheme

@@ -738,7 +747,7 @@ class Session(SessionRedirectMixin):
             self.adapters[key] = self.adapters.pop(key)

     def __getstate__(self):
-        state = dict((attr, getattr(self, attr, None)) for attr in self.__attrs__)
+        state = {attr: getattr(self, attr, None) for attr in self.__attrs__}
         return state

     def __setstate__(self, state):

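The new branch keeps the Authorization header on redirects that merely add or drop the scheme's default port (for example http://host to http://host:80), while still stripping it when the host, the scheme, or a non-default port changes. A small sketch of the decision, calling the should_strip_auth method shown in the hunk above (hostnames are just examples):

    import requests

    mixin = requests.sessions.SessionRedirectMixin()

    # Same origin, default port made explicit: credentials are kept.
    print(mixin.should_strip_auth('http://example.com/path',
                                  'http://example.com:80/other'))    # False

    # Different, non-default port: credentials are stripped.
    print(mixin.should_strip_auth('http://example.com/path',
                                  'http://example.com:8080/other'))  # True

Under 2.19.1 the first case also returned True, so the header was dropped on an effectively same-origin redirect.
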
15  lib/requests/utils.py

@@ -38,6 +38,8 @@ NETRC_FILES = ('.netrc', '_netrc')

 DEFAULT_CA_BUNDLE_PATH = certs.where()

+DEFAULT_PORTS = {'http': 80, 'https': 443}
+
 if sys.platform == 'win32':
     # provide a proxy_bypass version on Windows without DNS lookups

@@ -173,7 +175,7 @@ def get_netrc_auth(url, raise_errors=False):
         for f in NETRC_FILES:
             try:
-                loc = os.path.expanduser('~/{0}'.format(f))
+                loc = os.path.expanduser('~/{}'.format(f))
             except KeyError:
                 # os.path.expanduser can fail when $HOME is undefined and
                 # getpwuid fails. See https://bugs.python.org/issue20164 &

@@ -264,7 +266,7 @@ def from_key_val_list(value):
     >>> from_key_val_list([('key', 'val')])
     OrderedDict([('key', 'val')])
     >>> from_key_val_list('string')
-    ValueError: need more than 1 value to unpack
+    ValueError: cannot encode objects that are not 2-tuples
     >>> from_key_val_list({'key': 'val'})
     OrderedDict([('key', 'val')])

@@ -729,7 +731,7 @@ def should_bypass_proxies(url, no_proxy):
         else:
             host_with_port = parsed.hostname
             if parsed.port:
-                host_with_port += ':{0}'.format(parsed.port)
+                host_with_port += ':{}'.format(parsed.port)

         for host in no_proxy:
             if parsed.hostname.endswith(host) or host_with_port.endswith(host):

@@ -737,13 +739,8 @@ def should_bypass_proxies(url, no_proxy):
                 # to apply the proxies on this URL.
                 return True

-    # If the system proxy settings indicate that this URL should be bypassed,
-    # don't proxy.
-    # The proxy_bypass function is incredibly buggy on OS X in early versions
-    # of Python 2.6, so allow this call to fail. Only catch the specific
-    # exceptions we've seen, though: this call failing in other ways can reveal
-    # legitimate problems.
     with set_environ('no_proxy', no_proxy_arg):
+        # parsed.hostname can be `None` in cases such as a file URI.
        try:
            bypass = proxy_bypass(parsed.hostname)
        except (TypeError, socket.gaierror):

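Two small things worth noting here: the docstring now matches the ValueError that from_key_val_list actually raises, and DEFAULT_PORTS is the module-level constant consumed by sessions.py above. A quick check of both:

    from requests.utils import DEFAULT_PORTS, from_key_val_list

    print(DEFAULT_PORTS)                         # {'http': 80, 'https': 443}
    print(from_key_val_list([('key', 'val')]))   # OrderedDict([('key', 'val')])

    try:
        from_key_val_list('string')
    except ValueError as exc:
        print(exc)                               # cannot encode objects that are not 2-tuples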