Browse Source

Update cachecontrol library 0.12.3 (db54c40) → 0.12.4 (bc43a32)

pull/1080/head
JackDandy 7 years ago
parent
commit
1fb4330d1e
  1. 1
      CHANGES.md
  2. 2
      lib/cachecontrol/__init__.py
  3. 6
      lib/cachecontrol/adapter.py
  4. 6
      lib/cachecontrol/cache.py
  5. 6
      lib/cachecontrol/caches/redis_cache.py
  6. 18
      lib/msgpack/__init__.py
  7. 2
      lib/msgpack/_version.py
  8. 88
      lib/msgpack/fallback.py

1
CHANGES.md

@@ -1,6 +1,7 @@
### 0.16.0 (2018-xx-xx xx:xx:xx UTC) ### 0.16.0 (2018-xx-xx xx:xx:xx UTC)
* Update backports/ssl_match_hostname 3.5.0.1 (r18) to 3.7.0.1 (r28) * Update backports/ssl_match_hostname 3.5.0.1 (r18) to 3.7.0.1 (r28)
* Update cachecontrol library 0.12.3 (db54c40) to 0.12.4 (bd94f7e)
[develop changelog] [develop changelog]

2
lib/cachecontrol/__init__.py

@@ -4,7 +4,7 @@ Make it easy to import from cachecontrol without long namespaces.
""" """
__author__ = 'Eric Larson' __author__ = 'Eric Larson'
__email__ = 'eric@ionrock.org' __email__ = 'eric@ionrock.org'
__version__ = '0.12.3' __version__ = '0.12.4'
from .wrapper import CacheControl from .wrapper import CacheControl
from .adapter import CacheControlAdapter from .adapter import CacheControlAdapter

6
lib/cachecontrol/adapter.py

@@ -1,5 +1,6 @@
import types import types
import functools import functools
import zlib
from requests.adapters import HTTPAdapter from requests.adapters import HTTPAdapter
@@ -37,7 +38,10 @@ class CacheControlAdapter(HTTPAdapter):
""" """
cacheable = cacheable_methods or self.cacheable_methods cacheable = cacheable_methods or self.cacheable_methods
if request.method in cacheable: if request.method in cacheable:
cached_response = self.controller.cached_request(request) try:
cached_response = self.controller.cached_request(request)
except zlib.error:
cached_response = None
if cached_response: if cached_response:
return self.build_response(request, cached_response, return self.build_response(request, cached_response,
from_cache=True) from_cache=True)

6
lib/cachecontrol/cache.py

@@ -8,13 +8,13 @@ from threading import Lock
class BaseCache(object): class BaseCache(object):
def get(self, key): def get(self, key):
raise NotImplemented() raise NotImplementedError()
def set(self, key, value): def set(self, key, value):
raise NotImplemented() raise NotImplementedError()
def delete(self, key): def delete(self, key):
raise NotImplemented() raise NotImplementedError()
def close(self): def close(self):
pass pass

6
lib/cachecontrol/caches/redis_cache.py

@@ -1,6 +1,7 @@
from __future__ import division from __future__ import division
from datetime import datetime from datetime import datetime
from cachecontrol.cache import BaseCache
def total_seconds(td): def total_seconds(td):
@@ -13,7 +14,7 @@ def total_seconds(td):
return int((ms + secs * 10**6) / 10**6) return int((ms + secs * 10**6) / 10**6)
class RedisCache(object): class RedisCache(BaseCache):
def __init__(self, conn): def __init__(self, conn):
self.conn = conn self.conn = conn
@@ -38,4 +39,5 @@ class RedisCache(object):
self.conn.delete(key) self.conn.delete(key)
def close(self): def close(self):
self.conn.disconnect() """Redis uses connection pooling, no need to close the connection."""
pass

18
lib/msgpack/__init__.py

@@ -19,13 +19,13 @@ class ExtType(namedtuple('ExtType', 'code data')):
import os import os
if os.environ.get('MSGPACK_PUREPYTHON'): if os.environ.get('MSGPACK_PUREPYTHON'):
from msgpack.fallback import Packer, unpack, unpackb, Unpacker from msgpack.fallback import Packer, unpackb, Unpacker
else: else:
try: try:
from msgpack._packer import Packer from msgpack._packer import Packer
from msgpack._unpacker import unpack, unpackb, Unpacker from msgpack._unpacker import unpackb, Unpacker
except ImportError: except ImportError:
from msgpack.fallback import Packer, unpack, unpackb, Unpacker from msgpack.fallback import Packer, unpackb, Unpacker
def pack(o, stream, **kwargs): def pack(o, stream, **kwargs):
@@ -46,6 +46,18 @@ def packb(o, **kwargs):
""" """
return Packer(**kwargs).pack(o) return Packer(**kwargs).pack(o)
def unpack(stream, **kwargs):
"""
Unpack an object from `stream`.
Raises `ExtraData` when `stream` contains extra bytes.
See :class:`Unpacker` for options.
"""
data = stream.read()
return unpackb(data, **kwargs)
# alias for compatibility to simplejson/marshal/pickle. # alias for compatibility to simplejson/marshal/pickle.
load = unpack load = unpack
loads = unpackb loads = unpackb

2
lib/msgpack/_version.py

@@ -1 +1 @@
version = (0, 4, 8) version = (0, 5, 6)

88
lib/msgpack/fallback.py

@@ -101,12 +101,9 @@ def _get_data_from_buffer(obj):
def unpack(stream, **kwargs): def unpack(stream, **kwargs):
""" warnings.warn(
Unpack an object from `stream`. "Direct calling implementation's unpack() is deprecated, Use msgpack.unpack() or unpackb() instead.",
PendingDeprecationWarning)
Raises `ExtraData` when `packed` contains extra bytes.
See :class:`Unpacker` for options.
"""
data = stream.read() data = stream.read()
return unpackb(data, **kwargs) return unpackb(data, **kwargs)
@@ -145,6 +142,16 @@ class Unpacker(object):
If true, unpack msgpack array to Python list. If true, unpack msgpack array to Python list.
Otherwise, unpack to Python tuple. (default: True) Otherwise, unpack to Python tuple. (default: True)
:param bool raw:
If true, unpack msgpack raw to Python bytes (default).
Otherwise, unpack to Python str (or unicode on Python 2) by decoding
with UTF-8 encoding (recommended).
Currently, the default is true, but it will be changed to false in
near future. So you must specify it explicitly for keeping backward
compatibility.
*encoding* option which is deprecated overrides this option.
:param callable object_hook: :param callable object_hook:
When specified, it should be callable. When specified, it should be callable.
Unpacker calls it with a dict argument after unpacking msgpack map. Unpacker calls it with a dict argument after unpacking msgpack map.
@@ -160,7 +167,7 @@ class Unpacker(object):
If it is None (default), msgpack raw is deserialized to Python bytes. If it is None (default), msgpack raw is deserialized to Python bytes.
:param str unicode_errors: :param str unicode_errors:
Used for decoding msgpack raw with *encoding*. (deprecated) Used for decoding msgpack raw with *encoding*.
(default: `'strict'`) (default: `'strict'`)
:param int max_buffer_size: :param int max_buffer_size:
@@ -183,13 +190,13 @@ class Unpacker(object):
example of streaming deserialize from file-like object:: example of streaming deserialize from file-like object::
unpacker = Unpacker(file_like) unpacker = Unpacker(file_like, raw=False)
for o in unpacker: for o in unpacker:
process(o) process(o)
example of streaming deserialize from socket:: example of streaming deserialize from socket::
unpacker = Unpacker() unpacker = Unpacker(raw=False)
while True: while True:
buf = sock.recv(1024**2) buf = sock.recv(1024**2)
if not buf: if not buf:
@@ -199,15 +206,24 @@ class Unpacker(object):
process(o) process(o)
""" """
def __init__(self, file_like=None, read_size=0, use_list=True, def __init__(self, file_like=None, read_size=0, use_list=True, raw=True,
object_hook=None, object_pairs_hook=None, list_hook=None, object_hook=None, object_pairs_hook=None, list_hook=None,
encoding=None, unicode_errors='strict', max_buffer_size=0, encoding=None, unicode_errors=None, max_buffer_size=0,
ext_hook=ExtType, ext_hook=ExtType,
max_str_len=2147483647, # 2**32-1 max_str_len=2147483647, # 2**32-1
max_bin_len=2147483647, max_bin_len=2147483647,
max_array_len=2147483647, max_array_len=2147483647,
max_map_len=2147483647, max_map_len=2147483647,
max_ext_len=2147483647): max_ext_len=2147483647):
if encoding is not None:
warnings.warn(
"encoding is deprecated, Use raw=False instead.",
PendingDeprecationWarning)
if unicode_errors is None:
unicode_errors = 'strict'
if file_like is None: if file_like is None:
self._feeding = True self._feeding = True
else: else:
@@ -234,6 +250,7 @@ class Unpacker(object):
if read_size > self._max_buffer_size: if read_size > self._max_buffer_size:
raise ValueError("read_size must be smaller than max_buffer_size") raise ValueError("read_size must be smaller than max_buffer_size")
self._read_size = read_size or min(self._max_buffer_size, 16*1024) self._read_size = read_size or min(self._max_buffer_size, 16*1024)
self._raw = bool(raw)
self._encoding = encoding self._encoding = encoding
self._unicode_errors = unicode_errors self._unicode_errors = unicode_errors
self._use_list = use_list self._use_list = use_list
@@ -265,6 +282,13 @@ class Unpacker(object):
view = _get_data_from_buffer(next_bytes) view = _get_data_from_buffer(next_bytes)
if (len(self._buffer) - self._buff_i + len(view) > self._max_buffer_size): if (len(self._buffer) - self._buff_i + len(view) > self._max_buffer_size):
raise BufferFull raise BufferFull
# Strip buffer before checkpoint before reading file.
if self._buf_checkpoint > 0:
del self._buffer[:self._buf_checkpoint]
self._buff_i -= self._buf_checkpoint
self._buf_checkpoint = 0
self._buffer += view self._buffer += view
def _consume(self): def _consume(self):
@@ -582,8 +606,10 @@ class Unpacker(object):
if typ == TYPE_RAW: if typ == TYPE_RAW:
if self._encoding is not None: if self._encoding is not None:
obj = obj.decode(self._encoding, self._unicode_errors) obj = obj.decode(self._encoding, self._unicode_errors)
else: elif self._raw:
obj = bytes(obj) obj = bytes(obj)
else:
obj = obj.decode('utf_8')
return obj return obj
if typ == TYPE_EXT: if typ == TYPE_EXT:
return self._ext_hook(n, bytes(obj)) return self._ext_hook(n, bytes(obj))
@@ -609,12 +635,14 @@ class Unpacker(object):
def skip(self, write_bytes=None): def skip(self, write_bytes=None):
self._unpack(EX_SKIP) self._unpack(EX_SKIP)
if write_bytes is not None: if write_bytes is not None:
warnings.warn("`write_bytes` option is deprecated. Use `.tell()` instead.", DeprecationWarning)
write_bytes(self._buffer[self._buf_checkpoint:self._buff_i]) write_bytes(self._buffer[self._buf_checkpoint:self._buff_i])
self._consume() self._consume()
def unpack(self, write_bytes=None): def unpack(self, write_bytes=None):
ret = self._unpack(EX_CONSTRUCT) ret = self._unpack(EX_CONSTRUCT)
if write_bytes is not None: if write_bytes is not None:
warnings.warn("`write_bytes` option is deprecated. Use `.tell()` instead.", DeprecationWarning)
write_bytes(self._buffer[self._buf_checkpoint:self._buff_i]) write_bytes(self._buffer[self._buf_checkpoint:self._buff_i])
self._consume() self._consume()
return ret return ret
@@ -622,6 +650,7 @@ class Unpacker(object):
def read_array_header(self, write_bytes=None): def read_array_header(self, write_bytes=None):
ret = self._unpack(EX_READ_ARRAY_HEADER) ret = self._unpack(EX_READ_ARRAY_HEADER)
if write_bytes is not None: if write_bytes is not None:
warnings.warn("`write_bytes` option is deprecated. Use `.tell()` instead.", DeprecationWarning)
write_bytes(self._buffer[self._buf_checkpoint:self._buff_i]) write_bytes(self._buffer[self._buf_checkpoint:self._buff_i])
self._consume() self._consume()
return ret return ret
@@ -629,6 +658,7 @@ class Unpacker(object):
def read_map_header(self, write_bytes=None): def read_map_header(self, write_bytes=None):
ret = self._unpack(EX_READ_MAP_HEADER) ret = self._unpack(EX_READ_MAP_HEADER)
if write_bytes is not None: if write_bytes is not None:
warnings.warn("`write_bytes` option is deprecated. Use `.tell()` instead.", DeprecationWarning)
write_bytes(self._buffer[self._buf_checkpoint:self._buff_i]) write_bytes(self._buffer[self._buf_checkpoint:self._buff_i])
self._consume() self._consume()
return ret return ret
@@ -652,18 +682,18 @@ class Packer(object):
:param callable default: :param callable default:
Convert user type to builtin type that Packer supports. Convert user type to builtin type that Packer supports.
See also simplejson's document. See also simplejson's document.
:param str encoding:
Convert unicode to bytes with this encoding. (default: 'utf-8')
:param str unicode_errors:
Error handler for encoding unicode. (default: 'strict')
:param bool use_single_float: :param bool use_single_float:
Use single precision float type for float. (default: False) Use single precision float type for float. (default: False)
:param bool autoreset: :param bool autoreset:
Reset buffer after each pack and return its content as `bytes`. (default: True). Reset buffer after each pack and return its content as `bytes`. (default: True).
If set this to false, use `bytes()` to get content and `.reset()` to clear buffer. If set this to false, use `bytes()` to get content and `.reset()` to clear buffer.
:param bool use_bin_type: :param bool use_bin_type:
Use bin type introduced in msgpack spec 2.0 for bytes. Use bin type introduced in msgpack spec 2.0 for bytes.
It also enables str8 type for unicode. It also enables str8 type for unicode.
:param bool strict_types: :param bool strict_types:
If set to true, types will be checked to be exact. Derived classes If set to true, types will be checked to be exact. Derived classes
from serializeable types will not be serialized and will be from serializeable types will not be serialized and will be
@@ -671,10 +701,26 @@ class Packer(object):
Additionally tuples will not be serialized as lists. Additionally tuples will not be serialized as lists.
This is useful when trying to implement accurate serialization This is useful when trying to implement accurate serialization
for python types. for python types.
:param str encoding:
(deprecated) Convert unicode to bytes with this encoding. (default: 'utf-8')
:param str unicode_errors:
Error handler for encoding unicode. (default: 'strict')
""" """
def __init__(self, default=None, encoding='utf-8', unicode_errors='strict', def __init__(self, default=None, encoding=None, unicode_errors=None,
use_single_float=False, autoreset=True, use_bin_type=False, use_single_float=False, autoreset=True, use_bin_type=False,
strict_types=False): strict_types=False):
if encoding is None:
encoding = 'utf_8'
else:
warnings.warn(
"encoding is deprecated, Use raw=False instead.",
PendingDeprecationWarning)
if unicode_errors is None:
unicode_errors = 'strict'
self._strict_types = strict_types self._strict_types = strict_types
self._use_float = use_single_float self._use_float = use_single_float
self._autoreset = autoreset self._autoreset = autoreset
@@ -795,10 +841,14 @@ class Packer(object):
obj = self._default(obj) obj = self._default(obj)
default_used = 1 default_used = 1
continue continue
raise TypeError("Cannot serialize %r" % obj) raise TypeError("Cannot serialize %r" % (obj, ))
def pack(self, obj): def pack(self, obj):
self._pack(obj) try:
self._pack(obj)
except:
self._buffer = StringIO() # force reset
raise
ret = self._buffer.getvalue() ret = self._buffer.getvalue()
if self._autoreset: if self._autoreset:
self._buffer = StringIO() self._buffer = StringIO()

Loading…
Cancel
Save