
Update Requests library 2.25.1 (bdc00eb) → 2.26.0 (b0e025a).

Tag: tags/release_0.25.1
Committed by JackDandy, 4 years ago
Commit: 23d3190bda
10 changed files:

1. CHANGES.md (1)
2. lib/requests/__init__.py (39)
3. lib/requests/__version__.py (4)
4. lib/requests/api.py (2)
5. lib/requests/compat.py (13)
6. lib/requests/exceptions.py (10)
7. lib/requests/help.py (20)
8. lib/requests/models.py (33)
9. lib/requests/packages.py (14)
10. lib/requests/utils.py (33)

CHANGES.md (1)

@@ -82,6 +82,7 @@
 * Add menu Shows/"TVmaze Cards"
 * Add show name/networks card user input filter
 * Change only auto refresh card view if a recoverable error occurs
+* Update Requests library 2.25.1 (bdc00eb) to 2.26.0 (b0e025a)

 [develop changelog]

lib/requests/__init__.py (39)

@@ -41,12 +41,20 @@ is at <https://requests.readthedocs.io>.
 """

 import urllib3
-import chardet
 import warnings
 from .exceptions import RequestsDependencyWarning

+try:
+    from charset_normalizer import __version__ as charset_normalizer_version
+except ImportError:
+    charset_normalizer_version = None

-def check_compatibility(urllib3_version, chardet_version):
+try:
+    from chardet import __version__ as chardet_version
+except ImportError:
+    chardet_version = None
+
+def check_compatibility(urllib3_version, chardet_version, charset_normalizer_version):
     urllib3_version = urllib3_version.split('.')
     assert urllib3_version != ['dev']  # Verify urllib3 isn't installed from git.

@@ -62,12 +70,19 @@ def check_compatibility(urllib3_version, chardet_version):
     assert minor >= 21
     assert minor <= 26

-    # Check chardet for compatibility.
-    major, minor, patch = chardet_version.split('.')[:3]
-    major, minor, patch = int(major), int(minor), int(patch)
-    # chardet >= 3.0.2, < 5.0.0
-    assert (3, 0, 2) <= (major, minor, patch) < (5, 0, 0)
+    # Check charset_normalizer for compatibility.
+    if chardet_version:
+        major, minor, patch = chardet_version.split('.')[:3]
+        major, minor, patch = int(major), int(minor), int(patch)
+        # chardet_version >= 3.0.2, < 5.0.0
+        assert (3, 0, 2) <= (major, minor, patch) < (5, 0, 0)
+    elif charset_normalizer_version:
+        major, minor, patch = charset_normalizer_version.split('.')[:3]
+        major, minor, patch = int(major), int(minor), int(patch)
+        # charset_normalizer >= 2.0.0 < 3.0.0
+        assert (2, 0, 0) <= (major, minor, patch) < (3, 0, 0)
+    else:
+        raise Exception("You need either charset_normalizer or chardet installed")

@@ -82,10 +97,10 @@ def _check_cryptography(cryptography_version):
 # Check imported dependencies for compatibility.
 try:
-    check_compatibility(urllib3.__version__, chardet.__version__)
+    check_compatibility(urllib3.__version__, chardet_version, charset_normalizer_version)
 except (AssertionError, ValueError):
-    warnings.warn("urllib3 ({}) or chardet ({}) doesn't match a supported "
-                  "version!".format(urllib3.__version__, chardet.__version__),
+    warnings.warn("urllib3 ({}) or chardet ({})/charset_normalizer ({}) doesn't match a supported "
+                  "version!".format(urllib3.__version__, chardet_version, charset_normalizer_version),
                   RequestsDependencyWarning)

 # Attempt to enable urllib3's fallback for SNI support

@@ -124,7 +139,7 @@ from .status_codes import codes
 from .exceptions import (
     RequestException, Timeout, URLRequired,
     TooManyRedirects, HTTPError, ConnectionError,
-    FileModeWarning, ConnectTimeout, ReadTimeout
+    FileModeWarning, ConnectTimeout, ReadTimeout, JSONDecodeError
 )

 # Set default logging handler to avoid "No handler found" warnings.
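
The new three-argument check can be exercised directly; a minimal sketch (the version strings below are arbitrary examples, not pinned requirements):

# Illustrative only: exercising the updated check_compatibility signature with made-up versions.
from requests import check_compatibility

check_compatibility('1.26.6', '4.0.0', None)    # chardet 4.x available: passes
check_compatibility('1.26.6', None, '2.0.3')    # charset_normalizer 2.x available: passes
try:
    check_compatibility('1.26.6', None, None)   # neither detector available
except Exception as exc:
    print(exc)  # "You need either charset_normalizer or chardet installed"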

lib/requests/__version__.py (4)

@@ -5,8 +5,8 @@
 __title__ = 'requests'
 __description__ = 'Python HTTP for Humans.'
 __url__ = 'https://requests.readthedocs.io'
-__version__ = '2.25.1'
-__build__ = 0x022501
+__version__ = '2.26.0'
+__build__ = 0x022600
 __author__ = 'Kenneth Reitz'
 __author_email__ = 'me@kennethreitz.org'
 __license__ = 'Apache 2.0'

lib/requests/api.py (2)

@@ -72,7 +72,6 @@ def get(url, params=None, **kwargs):
     :rtype: requests.Response
     """

-    kwargs.setdefault('allow_redirects', True)
     return request('get', url, params=params, **kwargs)

@@ -85,7 +84,6 @@ def options(url, **kwargs):
     :rtype: requests.Response
     """

-    kwargs.setdefault('allow_redirects', True)
     return request('options', url, **kwargs)

lib/requests/compat.py (13)

@@ -8,7 +8,10 @@ This module handles import compatibility issues between Python 2 and
 Python 3.
 """

-import chardet
+try:
+    import chardet
+except ImportError:
+    import charset_normalizer as chardet

 import sys

@@ -25,8 +28,10 @@ is_py2 = (_ver[0] == 2)
 #: Python 3.x?
 is_py3 = (_ver[0] == 3)

+has_simplejson = False
 try:
     import simplejson as json
+    has_simplejson = True
 except ImportError:
     import json

@@ -46,13 +51,13 @@ if is_py2:
     # Keep OrderedDict for backwards compatibility.
     from collections import Callable, Mapping, MutableMapping, OrderedDict

     builtin_str = str
     bytes = str
     str = unicode
     basestring = basestring
     numeric_types = (int, long, float)
     integer_types = (int, long)
+    JSONDecodeError = ValueError

 elif is_py3:
     from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag

@@ -63,6 +68,10 @@ elif is_py3:
     # Keep OrderedDict for backwards compatibility.
     from collections import OrderedDict
     from collections.abc import Callable, Mapping, MutableMapping
+    if has_simplejson:
+        from simplejson import JSONDecodeError
+    else:
+        from json import JSONDecodeError

     builtin_str = str
     str = str
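
A quick sketch, not part of the diff, of what callers of requests.compat now see whichever detector is installed:

# Illustrative only: the compat shim exposes whichever detector was importable.
from requests.compat import chardet, JSONDecodeError, has_simplejson

print(chardet.__name__)   # 'chardet', or 'charset_normalizer' when chardet is absent
print(has_simplejson)     # True only if simplejson is installed
print(JSONDecodeError)    # json/simplejson JSONDecodeError on Python 3, plain ValueError on Python 2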

lib/requests/exceptions.py (10)

@@ -8,6 +8,8 @@ This module contains the set of Requests' exceptions.
 """
 from urllib3.exceptions import HTTPError as BaseHTTPError

+from .compat import JSONDecodeError as CompatJSONDecodeError
+

 class RequestException(IOError):
     """There was an ambiguous exception that occurred while handling your

@@ -25,6 +27,14 @@ class RequestException(IOError):
         super(RequestException, self).__init__(*args, **kwargs)


+class InvalidJSONError(RequestException):
+    """A JSON error occurred."""
+
+
+class JSONDecodeError(InvalidJSONError, CompatJSONDecodeError):
+    """Couldn't decode the text into json"""
+
+
 class HTTPError(RequestException):
     """An HTTP error occurred."""

lib/requests/help.py (20)

@@ -8,11 +8,20 @@ import ssl
 import idna
 import urllib3
-import chardet

 from . import __version__ as requests_version

+try:
+    import charset_normalizer
+except ImportError:
+    charset_normalizer = None
+
+try:
+    import chardet
+except ImportError:
+    chardet = None
+
 try:
     from urllib3.contrib import pyopenssl
 except ImportError:
     pyopenssl = None

@@ -71,7 +80,12 @@ def info():
     implementation_info = _implementation()
     urllib3_info = {'version': urllib3.__version__}
-    chardet_info = {'version': chardet.__version__}
+    charset_normalizer_info = {'version': None}
+    chardet_info = {'version': None}
+    if charset_normalizer:
+        charset_normalizer_info = {'version': charset_normalizer.__version__}
+    if chardet:
+        chardet_info = {'version': chardet.__version__}

     pyopenssl_info = {
         'version': None,

@@ -99,9 +113,11 @@ def info():
         'implementation': implementation_info,
         'system_ssl': system_ssl_info,
         'using_pyopenssl': pyopenssl is not None,
+        'using_charset_normalizer': chardet is None,
         'pyOpenSSL': pyopenssl_info,
         'urllib3': urllib3_info,
         'chardet': chardet_info,
+        'charset_normalizer': charset_normalizer_info,
         'cryptography': cryptography_info,
         'idna': idna_info,
         'requests': {
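
For reference, not part of the commit, the new keys show up in the diagnostic dump like this (the versions in the comments are examples):

# Illustrative only: the debug dump now reports both detectors and which one is active.
import requests.help

info = requests.help.info()
print(info['chardet'])                    # e.g. {'version': '4.0.0'} or {'version': None}
print(info['charset_normalizer'])         # e.g. {'version': '2.0.3'} or {'version': None}
print(info['using_charset_normalizer'])   # True only when chardet is not installed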

lib/requests/models.py (33)

@@ -29,7 +29,9 @@ from .auth import HTTPBasicAuth
 from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar
 from .exceptions import (
     HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError,
-    ContentDecodingError, ConnectionError, StreamConsumedError)
+    ContentDecodingError, ConnectionError, StreamConsumedError,
+    InvalidJSONError)
+from .exceptions import JSONDecodeError as RequestsJSONDecodeError
 from ._internal_utils import to_native_string, unicode_is_ascii
 from .utils import (
     guess_filename, get_auth_from_url, requote_uri,

@@ -38,7 +40,7 @@ from .utils import (
 from .compat import (
     Callable, Mapping,
     cookielib, urlunparse, urlsplit, urlencode, str, bytes,
-    is_py2, chardet, builtin_str, basestring)
+    is_py2, chardet, builtin_str, basestring, JSONDecodeError)
 from .compat import json as complexjson
 from .status_codes import codes

@@ -466,7 +468,12 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
             # urllib3 requires a bytes-like body. Python 2's json.dumps
             # provides this natively, but Python 3 gives a Unicode string.
             content_type = 'application/json'
-            body = complexjson.dumps(json)
+
+            try:
+                body = complexjson.dumps(json, allow_nan=False)
+            except ValueError as ve:
+                raise InvalidJSONError(ve, request=self)
+
             if not isinstance(body, bytes):
                 body = body.encode('utf-8')

@@ -726,7 +733,7 @@ class Response(object):
     @property
     def apparent_encoding(self):
-        """The apparent encoding, provided by the chardet library."""
+        """The apparent encoding, provided by the charset_normalizer or chardet libraries."""
         return chardet.detect(self.content)['encoding']

     def iter_content(self, chunk_size=1, decode_unicode=False):

@@ -840,7 +847,7 @@ class Response(object):
         """Content of the response, in unicode.

         If Response.encoding is None, encoding will be guessed using
-        ``chardet``.
+        ``charset_normalizer`` or ``chardet``.

         The encoding of the response content is determined based solely on HTTP
         headers, following RFC 2616 to the letter. If you can take advantage of

@@ -877,13 +884,14 @@
         r"""Returns the json-encoded content of a response, if any.

         :param \*\*kwargs: Optional arguments that ``json.loads`` takes.
-        :raises ValueError: If the response body does not contain valid json.
+        :raises requests.exceptions.JSONDecodeError: If the response body does not
+            contain valid json.
         """

         if not self.encoding and self.content and len(self.content) > 3:
             # No encoding set. JSON RFC 4627 section 3 states we should expect
             # UTF-8, -16 or -32. Detect which one to use; If the detection or
-            # decoding fails, fall back to `self.text` (using chardet to make
+            # decoding fails, fall back to `self.text` (using charset_normalizer to make
             # a best guess).
             encoding = guess_json_utf(self.content)
             if encoding is not None:

@@ -897,7 +905,16 @@
                     # and the server didn't bother to tell us what codec *was*
                     # used.
                     pass
-        return complexjson.loads(self.text, **kwargs)
+
+        try:
+            return complexjson.loads(self.text, **kwargs)
+        except JSONDecodeError as e:
+            # Catch JSON-related errors and raise as requests.JSONDecodeError
+            # This aliases json.JSONDecodeError and simplejson.JSONDecodeError
+            if is_py2:  # e is a ValueError
+                raise RequestsJSONDecodeError(e.message)
+            else:
+                raise RequestsJSONDecodeError(e.msg, e.doc, e.pos)

     @property
     def links(self):
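
Taken together, the models.py changes add two new failure modes; a hedged sketch follows (the URLs are placeholders for illustration, not endpoints this project uses):

# Illustrative only: the two new error paths introduced by this update.
import requests
from requests.exceptions import InvalidJSONError, JSONDecodeError

try:
    # allow_nan=False now rejects NaN/Infinity before anything is sent
    requests.post('http://example.invalid/api', json={'value': float('inf')})
except InvalidJSONError as exc:
    print('request body rejected:', exc)

resp = requests.get('https://httpbin.org/html')   # any endpoint returning a non-JSON body
try:
    resp.json()
except JSONDecodeError:                           # still a ValueError, so old handlers keep working
    print('response body was not valid JSON')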

lib/requests/packages.py (14)

@@ -1,9 +1,17 @@
 import sys

+try:
+    import chardet
+except ImportError:
+    import charset_normalizer as chardet
+    import warnings
+
+    warnings.filterwarnings('ignore', 'Trying to detect', module='charset_normalizer')
+
 # This code exists for backwards compatibility reasons.
 # I don't like it either. Just look the other way. :)

-for package in ('urllib3', 'idna', 'chardet'):
+for package in ('urllib3', 'idna'):
     locals()[package] = __import__(package)
     # This traversal is apparently necessary such that the identities are
     # preserved (requests.packages.urllib3.* is urllib3.*)

@@ -11,4 +19,8 @@ for package in ('urllib3', 'idna', 'chardet'):
         if mod == package or mod.startswith(package + '.'):
             sys.modules['requests.packages.' + mod] = sys.modules[mod]

+target = chardet.__name__
+for mod in list(sys.modules):
+    if mod == target or mod.startswith(target + '.'):
+        sys.modules['requests.packages.' + target.replace(target, 'chardet')] = sys.modules[mod]
+
 # Kinda cool, though, right?
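
Not part of the commit, but the intent of the aliasing block can be checked like this:

# Illustrative only: the legacy alias still resolves, whichever detector backs it.
import sys
import requests  # importing requests runs lib/requests/packages.py

print('requests.packages.chardet' in sys.modules)            # True with chardet or charset_normalizer
print(requests.packages.urllib3 is sys.modules['urllib3'])   # identities are preserved, as before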

lib/requests/utils.py (33)

@@ -20,6 +20,7 @@ import tempfile
 import warnings
 import zipfile
 from collections import OrderedDict
+from urllib3.util import make_headers

 from .__version__ import __version__
 from . import certs

@@ -41,6 +42,11 @@ DEFAULT_CA_BUNDLE_PATH = certs.where()

 DEFAULT_PORTS = {'http': 80, 'https': 443}

+# Ensure that ', ' is used to preserve previous delimiter behavior.
+DEFAULT_ACCEPT_ENCODING = ", ".join(
+    re.split(r",\s*", make_headers(accept_encoding=True)["accept-encoding"])
+)
+

 if sys.platform == 'win32':
     # provide a proxy_bypass version on Windows without DNS lookups

@@ -245,6 +251,10 @@ def extract_zipped_paths(path):
     archive, member = os.path.split(path)
     while archive and not os.path.exists(archive):
         archive, prefix = os.path.split(archive)
+        if not prefix:
+            # If we don't check for an empty prefix after the split (in other words, archive remains unchanged after the split),
+            # we _can_ end up in an infinite loop on a rare corner case affecting a small number of users
+            break
         member = '/'.join([prefix, member])

     if not zipfile.is_zipfile(archive):

@@ -256,13 +266,28 @@ def extract_zipped_paths(path):
     # we have a valid zip archive and a valid member of that archive
     tmp = tempfile.gettempdir()
-    extracted_path = os.path.join(tmp, *member.split('/'))
+    extracted_path = os.path.join(tmp, member.split('/')[-1])

     if not os.path.exists(extracted_path):
-        extracted_path = zip_file.extract(member, path=tmp)
+        # use read + write to avoid the creating nested folders, we only want the file, avoids mkdir racing condition
+        with atomic_open(extracted_path) as file_handler:
+            file_handler.write(zip_file.read(member))

     return extracted_path


+@contextlib.contextmanager
+def atomic_open(filename):
+    """Write a file to the disk in an atomic fashion"""
+    replacer = os.rename if sys.version_info[0] == 2 else os.replace
+
+    tmp_descriptor, tmp_name = tempfile.mkstemp(dir=os.path.dirname(filename))
+    try:
+        with os.fdopen(tmp_descriptor, 'wb') as tmp_handler:
+            yield tmp_handler
+        replacer(tmp_name, filename)
+    except BaseException:
+        os.remove(tmp_name)
+        raise
+
+
 def from_key_val_list(value):
     """Take an object and test to see if it can be represented as a
     dictionary. Unless it can not be represented as such, return an

@@ -820,7 +845,7 @@ def default_headers():
     """
     return CaseInsensitiveDict({
         'User-Agent': default_user_agent(),
-        'Accept-Encoding': ', '.join(('gzip', 'deflate')),
+        'Accept-Encoding': DEFAULT_ACCEPT_ENCODING,
         'Accept': '*/*',
         'Connection': 'keep-alive',
     })
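
For illustration, not part of the commit: because the default is now derived from urllib3's make_headers, it automatically advertises Brotli when urllib3 can decode it:

# Illustrative only: the Accept-Encoding default now tracks urllib3's capabilities.
from requests.utils import DEFAULT_ACCEPT_ENCODING, default_headers

print(DEFAULT_ACCEPT_ENCODING)                # "gzip, deflate", plus ", br" when Brotli support is installed
print(default_headers()['Accept-Encoding'])   # same string, picked up by every new Session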
