Browse Source

Update MsgPack 0.6.1 (05ff11d) → 1.0.0 (fa7d744).

pull/1289/head
JackDandy 5 years ago
parent
commit
f171e6a363
  1. 1
      CHANGES.md
  2. 19
      lib/msgpack/__init__.py
  3. 2
      lib/msgpack/_version.py
  4. 191
      lib/msgpack/ext.py
  5. 500
      lib/msgpack/fallback.py

1
CHANGES.md

@ -12,6 +12,7 @@
* Update Certifi 2019.06.16 (84dc766) to 2019.11.28 (21abb9b) * Update Certifi 2019.06.16 (84dc766) to 2019.11.28 (21abb9b)
* Update DiskCache library 4.0.0 (2c79bb9) to 4.1.0 (b0451e0) * Update DiskCache library 4.0.0 (2c79bb9) to 4.1.0 (b0451e0)
* Update idna library 2.8 (032fc55) to 2.9 (1233a73) * Update idna library 2.8 (032fc55) to 2.9 (1233a73)
* Update MsgPack 0.6.1 (05ff11d) to 1.0.0 (fa7d744)
* Update PySocks 1.7.0 (91dcdf0) to 1.7.1 (c2fa43c) * Update PySocks 1.7.0 (91dcdf0) to 1.7.1 (c2fa43c)
* Update Requests library 2.22.0 (3d968ff) to 2.23.0 (b7c6aba) * Update Requests library 2.22.0 (3d968ff) to 2.23.0 (b7c6aba)
* Update Six compatibility library 1.13.0 (ec58185) to 1.14.0 (3a3db75) * Update Six compatibility library 1.13.0 (ec58185) to 1.14.0 (3a3db75)

19
lib/msgpack/__init__.py

@ -1,24 +1,13 @@
# coding: utf-8 # coding: utf-8
from ._version import version from ._version import version
from .exceptions import * from .exceptions import *
from .ext import ExtType, Timestamp
from collections import namedtuple import os
import sys
class ExtType(namedtuple('ExtType', 'code data')):
"""ExtType represents ext type in msgpack."""
def __new__(cls, code, data):
if not isinstance(code, int):
raise TypeError("code must be int")
if not isinstance(data, bytes):
raise TypeError("data must be bytes")
if not 0 <= code <= 127:
raise ValueError("code must be 0~127")
return super(ExtType, cls).__new__(cls, code, data)
import os if os.environ.get("MSGPACK_PUREPYTHON") or sys.version_info[0] == 2:
if os.environ.get('MSGPACK_PUREPYTHON'):
from .fallback import Packer, unpackb, Unpacker from .fallback import Packer, unpackb, Unpacker
else: else:
try: try:

2
lib/msgpack/_version.py

@ -1 +1 @@
version = (0, 6, 1) version = (1, 0, 0)

191
lib/msgpack/ext.py

@ -0,0 +1,191 @@
# coding: utf-8
from collections import namedtuple
import datetime
import sys
import struct
# Python 2 / Python 3 compatibility shims used by the ext-type helpers below.
PY2 = sys.version_info.major == 2

if PY2:
    # ``long`` exists only on Python 2; integer checks must accept both.
    int_types = (int, long)  # noqa: F821
    # ``datetime.timezone`` is Python 3 only, so UTC conversion is
    # unavailable here (Timestamp.to_datetime is unsupported on Python 2).
    _utc = None
else:
    int_types = int
    try:
        _utc = datetime.timezone.utc
    except AttributeError:
        # Interpreter lacks the ``utc`` singleton: build a zero-offset
        # timezone by hand.
        _utc = datetime.timezone(datetime.timedelta(0))
class ExtType(namedtuple("ExtType", "code data")):
    """ExtType represents ext type in msgpack.

    An ext value pairs an application-defined type ``code`` (0..127) with an
    opaque ``data`` payload of bytes.  Instances are immutable named tuples.
    """

    def __new__(cls, code, data):
        # Validate both fields before delegating to the namedtuple
        # constructor, so invalid values never produce an instance.
        if not isinstance(code, int):
            raise TypeError("code must be int")
        if not isinstance(data, bytes):
            raise TypeError("data must be bytes")
        if code < 0 or code > 127:
            raise ValueError("code must be 0~127")
        return super(ExtType, cls).__new__(cls, code, data)
class Timestamp(object):
    """Timestamp represents the Timestamp extension type in msgpack.

    When built with Cython, msgpack uses C methods to pack and unpack
    `Timestamp`.  When using pure-Python msgpack, :func:`to_bytes` and
    :func:`from_bytes` are used to pack and unpack `Timestamp`.

    This class is immutable: Do not override seconds and nanoseconds.
    """

    __slots__ = ["seconds", "nanoseconds"]

    def __init__(self, seconds, nanoseconds=0):
        """Initialize a Timestamp object.

        :param int seconds:
            Number of seconds since the UNIX epoch (00:00:00 UTC Jan 1 1970,
            minus leap seconds).  May be negative.
        :param int nanoseconds:
            Number of nanoseconds to add to `seconds` to get fractional time.
            Maximum is 999_999_999.  Default is 0.

        Note: Negative times (before the UNIX epoch) are represented as
        negative seconds + positive ns.
        """
        if not isinstance(seconds, int_types):
            # fixed typo: message previously read "interger"
            raise TypeError("seconds must be an integer")
        if not isinstance(nanoseconds, int_types):
            raise TypeError("nanoseconds must be an integer")
        if not (0 <= nanoseconds < 10 ** 9):
            # 999999999 itself is allowed (the check is < 10**9), so the
            # message says "not larger than", not "less than".
            raise ValueError(
                "nanoseconds must be a non-negative integer not larger than 999999999."
            )
        self.seconds = seconds
        self.nanoseconds = nanoseconds

    def __repr__(self):
        """String representation of Timestamp."""
        return "Timestamp(seconds={0}, nanoseconds={1})".format(
            self.seconds, self.nanoseconds
        )

    def __eq__(self, other):
        """Check for equality with another Timestamp object."""
        if type(other) is self.__class__:
            return (
                self.seconds == other.seconds and self.nanoseconds == other.nanoseconds
            )
        return False

    def __ne__(self, other):
        """not-equals method (see :func:`__eq__()`)"""
        return not self.__eq__(other)

    def __hash__(self):
        # Hash consistent with __eq__: equal timestamps hash equally.
        return hash((self.seconds, self.nanoseconds))

    @staticmethod
    def from_bytes(b):
        """Unpack bytes into a `Timestamp` object.

        Used for pure-Python msgpack unpacking.

        :param b: Payload from msgpack ext message with code -1
        :type b: bytes

        :returns: Timestamp object unpacked from msgpack ext payload
        :rtype: Timestamp
        """
        if len(b) == 4:
            # timestamp 32: unsigned seconds only
            seconds = struct.unpack("!L", b)[0]
            nanoseconds = 0
        elif len(b) == 8:
            # timestamp 64: 30-bit nanoseconds | 34-bit seconds
            data64 = struct.unpack("!Q", b)[0]
            seconds = data64 & 0x00000003FFFFFFFF
            nanoseconds = data64 >> 34
        elif len(b) == 12:
            # timestamp 96: 32-bit nanoseconds + signed 64-bit seconds
            nanoseconds, seconds = struct.unpack("!Iq", b)
        else:
            raise ValueError(
                "Timestamp type can only be created from 32, 64, or 96-bit byte objects"
            )
        return Timestamp(seconds, nanoseconds)

    def to_bytes(self):
        """Pack this Timestamp object into bytes.

        Used for pure-Python msgpack packing.

        :returns data: Payload for EXT message with code -1 (timestamp type)
        :rtype: bytes
        """
        if (self.seconds >> 34) == 0:  # seconds is non-negative and fits in 34 bits
            data64 = self.nanoseconds << 34 | self.seconds
            if data64 & 0xFFFFFFFF00000000 == 0:
                # nanoseconds is zero and seconds < 2**32, so timestamp 32
                data = struct.pack("!L", data64)
            else:
                # timestamp 64
                data = struct.pack("!Q", data64)
        else:
            # timestamp 96
            data = struct.pack("!Iq", self.nanoseconds, self.seconds)
        return data

    @staticmethod
    def from_unix(unix_sec):
        """Create a Timestamp from posix timestamp in seconds.

        :param unix_sec: Posix timestamp in seconds.
        :type unix_sec: int or float
        """
        # // 1 and % 1 split the value into whole seconds and the positive
        # fractional part (works for negative inputs too).
        seconds = int(unix_sec // 1)
        nanoseconds = int((unix_sec % 1) * 10 ** 9)
        return Timestamp(seconds, nanoseconds)

    def to_unix(self):
        """Get the timestamp as a floating-point value.

        :returns: posix timestamp
        :rtype: float
        """
        return self.seconds + self.nanoseconds / 1e9

    @staticmethod
    def from_unix_nano(unix_ns):
        """Create a Timestamp from posix timestamp in nanoseconds.

        :param int unix_ns: Posix timestamp in nanoseconds.
        :rtype: Timestamp
        """
        return Timestamp(*divmod(unix_ns, 10 ** 9))

    def to_unix_nano(self):
        """Get the timestamp as a unixtime in nanoseconds.

        :returns: posix timestamp in nanoseconds
        :rtype: int
        """
        return self.seconds * 10 ** 9 + self.nanoseconds

    def to_datetime(self):
        """Get the timestamp as a UTC datetime.

        Python 2 is not supported.

        :rtype: datetime.
        """
        return datetime.datetime.fromtimestamp(self.to_unix(), _utc)

    @staticmethod
    def from_datetime(dt):
        """Create a Timestamp from datetime with tzinfo.

        Python 2 is not supported.

        :rtype: Timestamp
        """
        return Timestamp.from_unix(dt.timestamp())

500
lib/msgpack/fallback.py

@ -1,73 +1,85 @@
"""Fallback pure Python implementation of msgpack""" """Fallback pure Python implementation of msgpack"""
from datetime import datetime as _DateTime
import sys import sys
import struct import struct
import warnings
if sys.version_info[0] == 2: PY2 = sys.version_info[0] == 2
PY2 = True if PY2:
int_types = (int, long) int_types = (int, long)
def dict_iteritems(d): def dict_iteritems(d):
return d.iteritems() return d.iteritems()
else: else:
PY2 = False
int_types = int int_types = int
unicode = str unicode = str
xrange = range xrange = range
def dict_iteritems(d): def dict_iteritems(d):
return d.items() return d.items()
if sys.version_info < (3, 5): if sys.version_info < (3, 5):
# Ugly hack... # Ugly hack...
RecursionError = RuntimeError RecursionError = RuntimeError
def _is_recursionerror(e): def _is_recursionerror(e):
return len(e.args) == 1 and isinstance(e.args[0], str) and \ return (
e.args[0].startswith('maximum recursion depth exceeded') len(e.args) == 1
and isinstance(e.args[0], str)
and e.args[0].startswith("maximum recursion depth exceeded")
)
else: else:
def _is_recursionerror(e): def _is_recursionerror(e):
return True return True
if hasattr(sys, 'pypy_version_info'):
# cStringIO is slow on PyPy, StringIO is faster. However: PyPy's own if hasattr(sys, "pypy_version_info"):
# StringIO is slow on PyPy, StringIO is faster. However: PyPy's own
# StringBuilder is fastest. # StringBuilder is fastest.
from __pypy__ import newlist_hint from __pypy__ import newlist_hint
try: try:
from __pypy__.builders import BytesBuilder as StringBuilder from __pypy__.builders import BytesBuilder as StringBuilder
except ImportError: except ImportError:
from __pypy__.builders import StringBuilder from __pypy__.builders import StringBuilder
USING_STRINGBUILDER = True USING_STRINGBUILDER = True
class StringIO(object): class StringIO(object):
def __init__(self, s=b''): def __init__(self, s=b""):
if s: if s:
self.builder = StringBuilder(len(s)) self.builder = StringBuilder(len(s))
self.builder.append(s) self.builder.append(s)
else: else:
self.builder = StringBuilder() self.builder = StringBuilder()
def write(self, s): def write(self, s):
if isinstance(s, memoryview): if isinstance(s, memoryview):
s = s.tobytes() s = s.tobytes()
elif isinstance(s, bytearray): elif isinstance(s, bytearray):
s = bytes(s) s = bytes(s)
self.builder.append(s) self.builder.append(s)
def getvalue(self): def getvalue(self):
return self.builder.build() return self.builder.build()
else: else:
USING_STRINGBUILDER = False USING_STRINGBUILDER = False
from io import BytesIO as StringIO from io import BytesIO as StringIO
newlist_hint = lambda size: [] newlist_hint = lambda size: []
from .exceptions import ( from .exceptions import BufferFull, OutOfData, ExtraData, FormatError, StackError
BufferFull,
OutOfData,
ExtraData,
FormatError,
StackError,
)
from . import ExtType from .ext import ExtType, Timestamp
EX_SKIP = 0 EX_SKIP = 0
@ -93,31 +105,12 @@ def _check_type_strict(obj, t, type=type, tuple=tuple):
def _get_data_from_buffer(obj): def _get_data_from_buffer(obj):
try:
view = memoryview(obj) view = memoryview(obj)
except TypeError:
# try to use legacy buffer protocol if 2.7, otherwise re-raise
if PY2:
view = memoryview(buffer(obj))
warnings.warn("using old buffer interface to unpack %s; "
"this leads to unpacking errors if slicing is used and "
"will be removed in a future version" % type(obj),
RuntimeWarning, stacklevel=3)
else:
raise
if view.itemsize != 1: if view.itemsize != 1:
raise ValueError("cannot unpack from multi-byte object") raise ValueError("cannot unpack from multi-byte object")
return view return view
def unpack(stream, **kwargs):
warnings.warn(
"Direct calling implementation's unpack() is deprecated, Use msgpack.unpack() or unpackb() instead.",
DeprecationWarning, stacklevel=2)
data = stream.read()
return unpackb(data, **kwargs)
def unpackb(packed, **kwargs): def unpackb(packed, **kwargs):
""" """
Unpack an object from `packed`. Unpack an object from `packed`.
@ -146,9 +139,12 @@ def unpackb(packed, **kwargs):
if sys.version_info < (2, 7, 6): if sys.version_info < (2, 7, 6):
def _unpack_from(f, b, o=0): def _unpack_from(f, b, o=0):
"""Explicit typcast for legacy struct.unpack_from""" """Explicit type cast for legacy struct.unpack_from"""
return struct.unpack_from(f, bytes(b), o) return struct.unpack_from(f, bytes(b), o)
else: else:
_unpack_from = struct.unpack_from _unpack_from = struct.unpack_from
@ -156,7 +152,7 @@ else:
class Unpacker(object): class Unpacker(object):
"""Streaming unpacker. """Streaming unpacker.
arguments: Arguments:
:param file_like: :param file_like:
File-like object having `.read(n)` method. File-like object having `.read(n)` method.
@ -170,19 +166,19 @@ class Unpacker(object):
Otherwise, unpack to Python tuple. (default: True) Otherwise, unpack to Python tuple. (default: True)
:param bool raw: :param bool raw:
If true, unpack msgpack raw to Python bytes (default). If true, unpack msgpack raw to Python bytes.
Otherwise, unpack to Python str (or unicode on Python 2) by decoding Otherwise, unpack to Python str by decoding with UTF-8 encoding (default).
with UTF-8 encoding (recommended).
Currently, the default is true, but it will be changed to false in :param int timestamp:
near future. So you must specify it explicitly for keeping backward Control how timestamp type is unpacked:
compatibility.
*encoding* option which is deprecated overrides this option. 0 - Timestamp
1 - float (Seconds from the EPOCH)
2 - int (Nanoseconds from the EPOCH)
3 - datetime.datetime (UTC). Python 2 is not supported.
:param bool strict_map_key: :param bool strict_map_key:
If true, only str or bytes are accepted for map (dict) keys. If true (default), only str or bytes are accepted for map (dict) keys.
It's False by default for backward-compatibility.
But it will be True from msgpack 1.0.
:param callable object_hook: :param callable object_hook:
When specified, it should be callable. When specified, it should be callable.
@ -194,48 +190,46 @@ class Unpacker(object):
Unpacker calls it with a list of key-value pairs after unpacking msgpack map. Unpacker calls it with a list of key-value pairs after unpacking msgpack map.
(See also simplejson) (See also simplejson)
:param str encoding:
Encoding used for decoding msgpack raw.
If it is None (default), msgpack raw is deserialized to Python bytes.
:param str unicode_errors: :param str unicode_errors:
(deprecated) Used for decoding msgpack raw with *encoding*. The error handler for decoding unicode. (default: 'strict')
(default: `'strict'`) This option should be used only when you have msgpack data which
contains invalid UTF-8 string.
:param int max_buffer_size: :param int max_buffer_size:
Limits size of data waiting unpacked. 0 means system's INT_MAX (default). Limits size of data waiting unpacked. 0 means 2**32-1.
The default value is 100*1024*1024 (100MiB).
Raises `BufferFull` exception when it is insufficient. Raises `BufferFull` exception when it is insufficient.
You should set this parameter when unpacking data from untrusted source. You should set this parameter when unpacking data from untrusted source.
:param int max_str_len: :param int max_str_len:
Deprecated, use *max_buffer_size* instead. Deprecated, use *max_buffer_size* instead.
Limits max length of str. (default: max_buffer_size or 1024*1024) Limits max length of str. (default: max_buffer_size)
:param int max_bin_len: :param int max_bin_len:
Deprecated, use *max_buffer_size* instead. Deprecated, use *max_buffer_size* instead.
Limits max length of bin. (default: max_buffer_size or 1024*1024) Limits max length of bin. (default: max_buffer_size)
:param int max_array_len: :param int max_array_len:
Limits max length of array. Limits max length of array.
(default: max_buffer_size or 128*1024) (default: max_buffer_size)
:param int max_map_len: :param int max_map_len:
Limits max length of map. Limits max length of map.
(default: max_buffer_size//2 or 32*1024) (default: max_buffer_size//2)
:param int max_ext_len: :param int max_ext_len:
Deprecated, use *max_buffer_size* instead. Deprecated, use *max_buffer_size* instead.
Limits max size of ext type. (default: max_buffer_size or 1024*1024) Limits max size of ext type. (default: max_buffer_size)
Example of streaming deserialize from file-like object:: Example of streaming deserialize from file-like object::
unpacker = Unpacker(file_like, raw=False, max_buffer_size=10*1024*1024) unpacker = Unpacker(file_like)
for o in unpacker: for o in unpacker:
process(o) process(o)
Example of streaming deserialize from socket:: Example of streaming deserialize from socket::
unpacker = Unpacker(raw=False, max_buffer_size=10*1024*1024) unpacker = Unpacker(max_buffer_size)
while True: while True:
buf = sock.recv(1024**2) buf = sock.recv(1024**2)
if not buf: if not buf:
@ -251,22 +245,28 @@ class Unpacker(object):
Other exceptions can be raised during unpacking. Other exceptions can be raised during unpacking.
""" """
def __init__(self, file_like=None, read_size=0, use_list=True, raw=True, strict_map_key=False, def __init__(
object_hook=None, object_pairs_hook=None, list_hook=None, self,
encoding=None, unicode_errors=None, max_buffer_size=0, file_like=None,
read_size=0,
use_list=True,
raw=False,
timestamp=0,
strict_map_key=True,
object_hook=None,
object_pairs_hook=None,
list_hook=None,
unicode_errors=None,
max_buffer_size=100 * 1024 * 1024,
ext_hook=ExtType, ext_hook=ExtType,
max_str_len=-1, max_str_len=-1,
max_bin_len=-1, max_bin_len=-1,
max_array_len=-1, max_array_len=-1,
max_map_len=-1, max_map_len=-1,
max_ext_len=-1): max_ext_len=-1,
if encoding is not None: ):
warnings.warn(
"encoding is deprecated, Use raw=False instead.",
DeprecationWarning, stacklevel=2)
if unicode_errors is None: if unicode_errors is None:
unicode_errors = 'strict' unicode_errors = "strict"
if file_like is None: if file_like is None:
self._feeding = True self._feeding = True
@ -290,26 +290,30 @@ class Unpacker(object):
# state, which _buf_checkpoint records. # state, which _buf_checkpoint records.
self._buf_checkpoint = 0 self._buf_checkpoint = 0
if not max_buffer_size:
max_buffer_size = 2 ** 31 - 1
if max_str_len == -1: if max_str_len == -1:
max_str_len = max_buffer_size or 1024*1024 max_str_len = max_buffer_size
if max_bin_len == -1: if max_bin_len == -1:
max_bin_len = max_buffer_size or 1024*1024 max_bin_len = max_buffer_size
if max_array_len == -1: if max_array_len == -1:
max_array_len = max_buffer_size or 128*1024 max_array_len = max_buffer_size
if max_map_len == -1: if max_map_len == -1:
max_map_len = max_buffer_size//2 or 32*1024 max_map_len = max_buffer_size // 2
if max_ext_len == -1: if max_ext_len == -1:
max_ext_len = max_buffer_size or 1024*1024 max_ext_len = max_buffer_size
self._max_buffer_size = max_buffer_size or 2**31-1 self._max_buffer_size = max_buffer_size
if read_size > self._max_buffer_size: if read_size > self._max_buffer_size:
raise ValueError("read_size must be smaller than max_buffer_size") raise ValueError("read_size must be smaller than max_buffer_size")
self._read_size = read_size or min(self._max_buffer_size, 16 * 1024) self._read_size = read_size or min(self._max_buffer_size, 16 * 1024)
self._raw = bool(raw) self._raw = bool(raw)
self._strict_map_key = bool(strict_map_key) self._strict_map_key = bool(strict_map_key)
self._encoding = encoding
self._unicode_errors = unicode_errors self._unicode_errors = unicode_errors
self._use_list = use_list self._use_list = use_list
if not (0 <= timestamp <= 3):
raise ValueError("timestamp must be 0..3")
self._timestamp = timestamp
self._list_hook = list_hook self._list_hook = list_hook
self._object_hook = object_hook self._object_hook = object_hook
self._object_pairs_hook = object_pairs_hook self._object_pairs_hook = object_pairs_hook
@ -322,21 +326,22 @@ class Unpacker(object):
self._stream_offset = 0 self._stream_offset = 0
if list_hook is not None and not callable(list_hook): if list_hook is not None and not callable(list_hook):
raise TypeError('`list_hook` is not callable') raise TypeError("`list_hook` is not callable")
if object_hook is not None and not callable(object_hook): if object_hook is not None and not callable(object_hook):
raise TypeError('`object_hook` is not callable') raise TypeError("`object_hook` is not callable")
if object_pairs_hook is not None and not callable(object_pairs_hook): if object_pairs_hook is not None and not callable(object_pairs_hook):
raise TypeError('`object_pairs_hook` is not callable') raise TypeError("`object_pairs_hook` is not callable")
if object_hook is not None and object_pairs_hook is not None: if object_hook is not None and object_pairs_hook is not None:
raise TypeError("object_pairs_hook and object_hook are mutually " raise TypeError(
"exclusive") "object_pairs_hook and object_hook are mutually " "exclusive"
)
if not callable(ext_hook): if not callable(ext_hook):
raise TypeError("`ext_hook` is not callable") raise TypeError("`ext_hook` is not callable")
def feed(self, next_bytes): def feed(self, next_bytes):
assert self._feeding assert self._feeding
view = _get_data_from_buffer(next_bytes) view = _get_data_from_buffer(next_bytes)
if (len(self._buffer) - self._buff_i + len(view) > self._max_buffer_size): if len(self._buffer) - self._buff_i + len(view) > self._max_buffer_size:
raise BufferFull raise BufferFull
# Strip buffer before checkpoint before reading file. # Strip buffer before checkpoint before reading file.
@ -360,7 +365,9 @@ class Unpacker(object):
return self._buffer[self._buff_i :] return self._buffer[self._buff_i :]
def read_bytes(self, n): def read_bytes(self, n):
return self._read(n) ret = self._read(n)
self._consume()
return ret
def _read(self, n): def _read(self, n):
# (int) -> bytearray # (int) -> bytearray
@ -411,7 +418,7 @@ class Unpacker(object):
if b & 0b10000000 == 0: if b & 0b10000000 == 0:
obj = b obj = b
elif b & 0b11100000 == 0b11100000: elif b & 0b11100000 == 0b11100000:
obj = -1 - (b ^ 0xff) obj = -1 - (b ^ 0xFF)
elif b & 0b11100000 == 0b10100000: elif b & 0b11100000 == 0b10100000:
n = b & 0b00011111 n = b & 0b00011111
typ = TYPE_RAW typ = TYPE_RAW
@ -428,13 +435,13 @@ class Unpacker(object):
typ = TYPE_MAP typ = TYPE_MAP
if n > self._max_map_len: if n > self._max_map_len:
raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len) raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len)
elif b == 0xc0: elif b == 0xC0:
obj = None obj = None
elif b == 0xc2: elif b == 0xC2:
obj = False obj = False
elif b == 0xc3: elif b == 0xC3:
obj = True obj = True
elif b == 0xc4: elif b == 0xC4:
typ = TYPE_BIN typ = TYPE_BIN
self._reserve(1) self._reserve(1)
n = self._buffer[self._buff_i] n = self._buffer[self._buff_i]
@ -442,7 +449,7 @@ class Unpacker(object):
if n > self._max_bin_len: if n > self._max_bin_len:
raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len)) raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len))
obj = self._read(n) obj = self._read(n)
elif b == 0xc5: elif b == 0xC5:
typ = TYPE_BIN typ = TYPE_BIN
self._reserve(2) self._reserve(2)
n = _unpack_from(">H", self._buffer, self._buff_i)[0] n = _unpack_from(">H", self._buffer, self._buff_i)[0]
@ -450,7 +457,7 @@ class Unpacker(object):
if n > self._max_bin_len: if n > self._max_bin_len:
raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len)) raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len))
obj = self._read(n) obj = self._read(n)
elif b == 0xc6: elif b == 0xC6:
typ = TYPE_BIN typ = TYPE_BIN
self._reserve(4) self._reserve(4)
n = _unpack_from(">I", self._buffer, self._buff_i)[0] n = _unpack_from(">I", self._buffer, self._buff_i)[0]
@ -458,106 +465,106 @@ class Unpacker(object):
if n > self._max_bin_len: if n > self._max_bin_len:
raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len)) raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len))
obj = self._read(n) obj = self._read(n)
elif b == 0xc7: # ext 8 elif b == 0xC7: # ext 8
typ = TYPE_EXT typ = TYPE_EXT
self._reserve(2) self._reserve(2)
L, n = _unpack_from('Bb', self._buffer, self._buff_i) L, n = _unpack_from("Bb", self._buffer, self._buff_i)
self._buff_i += 2 self._buff_i += 2
if L > self._max_ext_len: if L > self._max_ext_len:
raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len)) raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len))
obj = self._read(L) obj = self._read(L)
elif b == 0xc8: # ext 16 elif b == 0xC8: # ext 16
typ = TYPE_EXT typ = TYPE_EXT
self._reserve(3) self._reserve(3)
L, n = _unpack_from('>Hb', self._buffer, self._buff_i) L, n = _unpack_from(">Hb", self._buffer, self._buff_i)
self._buff_i += 3 self._buff_i += 3
if L > self._max_ext_len: if L > self._max_ext_len:
raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len)) raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len))
obj = self._read(L) obj = self._read(L)
elif b == 0xc9: # ext 32 elif b == 0xC9: # ext 32
typ = TYPE_EXT typ = TYPE_EXT
self._reserve(5) self._reserve(5)
L, n = _unpack_from('>Ib', self._buffer, self._buff_i) L, n = _unpack_from(">Ib", self._buffer, self._buff_i)
self._buff_i += 5 self._buff_i += 5
if L > self._max_ext_len: if L > self._max_ext_len:
raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len)) raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len))
obj = self._read(L) obj = self._read(L)
elif b == 0xca: elif b == 0xCA:
self._reserve(4) self._reserve(4)
obj = _unpack_from(">f", self._buffer, self._buff_i)[0] obj = _unpack_from(">f", self._buffer, self._buff_i)[0]
self._buff_i += 4 self._buff_i += 4
elif b == 0xcb: elif b == 0xCB:
self._reserve(8) self._reserve(8)
obj = _unpack_from(">d", self._buffer, self._buff_i)[0] obj = _unpack_from(">d", self._buffer, self._buff_i)[0]
self._buff_i += 8 self._buff_i += 8
elif b == 0xcc: elif b == 0xCC:
self._reserve(1) self._reserve(1)
obj = self._buffer[self._buff_i] obj = self._buffer[self._buff_i]
self._buff_i += 1 self._buff_i += 1
elif b == 0xcd: elif b == 0xCD:
self._reserve(2) self._reserve(2)
obj = _unpack_from(">H", self._buffer, self._buff_i)[0] obj = _unpack_from(">H", self._buffer, self._buff_i)[0]
self._buff_i += 2 self._buff_i += 2
elif b == 0xce: elif b == 0xCE:
self._reserve(4) self._reserve(4)
obj = _unpack_from(">I", self._buffer, self._buff_i)[0] obj = _unpack_from(">I", self._buffer, self._buff_i)[0]
self._buff_i += 4 self._buff_i += 4
elif b == 0xcf: elif b == 0xCF:
self._reserve(8) self._reserve(8)
obj = _unpack_from(">Q", self._buffer, self._buff_i)[0] obj = _unpack_from(">Q", self._buffer, self._buff_i)[0]
self._buff_i += 8 self._buff_i += 8
elif b == 0xd0: elif b == 0xD0:
self._reserve(1) self._reserve(1)
obj = _unpack_from("b", self._buffer, self._buff_i)[0] obj = _unpack_from("b", self._buffer, self._buff_i)[0]
self._buff_i += 1 self._buff_i += 1
elif b == 0xd1: elif b == 0xD1:
self._reserve(2) self._reserve(2)
obj = _unpack_from(">h", self._buffer, self._buff_i)[0] obj = _unpack_from(">h", self._buffer, self._buff_i)[0]
self._buff_i += 2 self._buff_i += 2
elif b == 0xd2: elif b == 0xD2:
self._reserve(4) self._reserve(4)
obj = _unpack_from(">i", self._buffer, self._buff_i)[0] obj = _unpack_from(">i", self._buffer, self._buff_i)[0]
self._buff_i += 4 self._buff_i += 4
elif b == 0xd3: elif b == 0xD3:
self._reserve(8) self._reserve(8)
obj = _unpack_from(">q", self._buffer, self._buff_i)[0] obj = _unpack_from(">q", self._buffer, self._buff_i)[0]
self._buff_i += 8 self._buff_i += 8
elif b == 0xd4: # fixext 1 elif b == 0xD4: # fixext 1
typ = TYPE_EXT typ = TYPE_EXT
if self._max_ext_len < 1: if self._max_ext_len < 1:
raise ValueError("%s exceeds max_ext_len(%s)" % (1, self._max_ext_len)) raise ValueError("%s exceeds max_ext_len(%s)" % (1, self._max_ext_len))
self._reserve(2) self._reserve(2)
n, obj = _unpack_from("b1s", self._buffer, self._buff_i) n, obj = _unpack_from("b1s", self._buffer, self._buff_i)
self._buff_i += 2 self._buff_i += 2
elif b == 0xd5: # fixext 2 elif b == 0xD5: # fixext 2
typ = TYPE_EXT typ = TYPE_EXT
if self._max_ext_len < 2: if self._max_ext_len < 2:
raise ValueError("%s exceeds max_ext_len(%s)" % (2, self._max_ext_len)) raise ValueError("%s exceeds max_ext_len(%s)" % (2, self._max_ext_len))
self._reserve(3) self._reserve(3)
n, obj = _unpack_from("b2s", self._buffer, self._buff_i) n, obj = _unpack_from("b2s", self._buffer, self._buff_i)
self._buff_i += 3 self._buff_i += 3
elif b == 0xd6: # fixext 4 elif b == 0xD6: # fixext 4
typ = TYPE_EXT typ = TYPE_EXT
if self._max_ext_len < 4: if self._max_ext_len < 4:
raise ValueError("%s exceeds max_ext_len(%s)" % (4, self._max_ext_len)) raise ValueError("%s exceeds max_ext_len(%s)" % (4, self._max_ext_len))
self._reserve(5) self._reserve(5)
n, obj = _unpack_from("b4s", self._buffer, self._buff_i) n, obj = _unpack_from("b4s", self._buffer, self._buff_i)
self._buff_i += 5 self._buff_i += 5
elif b == 0xd7: # fixext 8 elif b == 0xD7: # fixext 8
typ = TYPE_EXT typ = TYPE_EXT
if self._max_ext_len < 8: if self._max_ext_len < 8:
raise ValueError("%s exceeds max_ext_len(%s)" % (8, self._max_ext_len)) raise ValueError("%s exceeds max_ext_len(%s)" % (8, self._max_ext_len))
self._reserve(9) self._reserve(9)
n, obj = _unpack_from("b8s", self._buffer, self._buff_i) n, obj = _unpack_from("b8s", self._buffer, self._buff_i)
self._buff_i += 9 self._buff_i += 9
elif b == 0xd8: # fixext 16 elif b == 0xD8: # fixext 16
typ = TYPE_EXT typ = TYPE_EXT
if self._max_ext_len < 16: if self._max_ext_len < 16:
raise ValueError("%s exceeds max_ext_len(%s)" % (16, self._max_ext_len)) raise ValueError("%s exceeds max_ext_len(%s)" % (16, self._max_ext_len))
self._reserve(17) self._reserve(17)
n, obj = _unpack_from("b16s", self._buffer, self._buff_i) n, obj = _unpack_from("b16s", self._buffer, self._buff_i)
self._buff_i += 17 self._buff_i += 17
elif b == 0xd9: elif b == 0xD9:
typ = TYPE_RAW typ = TYPE_RAW
self._reserve(1) self._reserve(1)
n = self._buffer[self._buff_i] n = self._buffer[self._buff_i]
@ -565,46 +572,46 @@ class Unpacker(object):
if n > self._max_str_len: if n > self._max_str_len:
raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len) raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
obj = self._read(n) obj = self._read(n)
elif b == 0xda: elif b == 0xDA:
typ = TYPE_RAW typ = TYPE_RAW
self._reserve(2) self._reserve(2)
n, = _unpack_from(">H", self._buffer, self._buff_i) (n,) = _unpack_from(">H", self._buffer, self._buff_i)
self._buff_i += 2 self._buff_i += 2
if n > self._max_str_len: if n > self._max_str_len:
raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len) raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
obj = self._read(n) obj = self._read(n)
elif b == 0xdb: elif b == 0xDB:
typ = TYPE_RAW typ = TYPE_RAW
self._reserve(4) self._reserve(4)
n, = _unpack_from(">I", self._buffer, self._buff_i) (n,) = _unpack_from(">I", self._buffer, self._buff_i)
self._buff_i += 4 self._buff_i += 4
if n > self._max_str_len: if n > self._max_str_len:
raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len) raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len)
obj = self._read(n) obj = self._read(n)
elif b == 0xdc: elif b == 0xDC:
typ = TYPE_ARRAY typ = TYPE_ARRAY
self._reserve(2) self._reserve(2)
n, = _unpack_from(">H", self._buffer, self._buff_i) (n,) = _unpack_from(">H", self._buffer, self._buff_i)
self._buff_i += 2 self._buff_i += 2
if n > self._max_array_len: if n > self._max_array_len:
raise ValueError("%s exceeds max_array_len(%s)", n, self._max_array_len) raise ValueError("%s exceeds max_array_len(%s)", n, self._max_array_len)
elif b == 0xdd: elif b == 0xDD:
typ = TYPE_ARRAY typ = TYPE_ARRAY
self._reserve(4) self._reserve(4)
n, = _unpack_from(">I", self._buffer, self._buff_i) (n,) = _unpack_from(">I", self._buffer, self._buff_i)
self._buff_i += 4 self._buff_i += 4
if n > self._max_array_len: if n > self._max_array_len:
raise ValueError("%s exceeds max_array_len(%s)", n, self._max_array_len) raise ValueError("%s exceeds max_array_len(%s)", n, self._max_array_len)
elif b == 0xde: elif b == 0xDE:
self._reserve(2) self._reserve(2)
n, = _unpack_from(">H", self._buffer, self._buff_i) (n,) = _unpack_from(">H", self._buffer, self._buff_i)
self._buff_i += 2 self._buff_i += 2
if n > self._max_map_len: if n > self._max_map_len:
raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len) raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len)
typ = TYPE_MAP typ = TYPE_MAP
elif b == 0xdf: elif b == 0xDF:
self._reserve(4) self._reserve(4)
n, = _unpack_from(">I", self._buffer, self._buff_i) (n,) = _unpack_from(">I", self._buffer, self._buff_i)
self._buff_i += 4 self._buff_i += 4
if n > self._max_map_len: if n > self._max_map_len:
raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len) raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len)
@ -647,15 +654,19 @@ class Unpacker(object):
return return
if self._object_pairs_hook is not None: if self._object_pairs_hook is not None:
ret = self._object_pairs_hook( ret = self._object_pairs_hook(
(self._unpack(EX_CONSTRUCT), (self._unpack(EX_CONSTRUCT), self._unpack(EX_CONSTRUCT))
self._unpack(EX_CONSTRUCT)) for _ in xrange(n)
for _ in xrange(n)) )
else: else:
ret = {} ret = {}
for _ in xrange(n): for _ in xrange(n):
key = self._unpack(EX_CONSTRUCT) key = self._unpack(EX_CONSTRUCT)
if self._strict_map_key and type(key) not in (unicode, bytes): if self._strict_map_key and type(key) not in (unicode, bytes):
raise ValueError("%s is not allowed for map key" % str(type(key))) raise ValueError(
"%s is not allowed for map key" % str(type(key))
)
if not PY2 and type(key) is str:
key = sys.intern(key)
ret[key] = self._unpack(EX_CONSTRUCT) ret[key] = self._unpack(EX_CONSTRUCT)
if self._object_hook is not None: if self._object_hook is not None:
ret = self._object_hook(ret) ret = self._object_hook(ret)
@ -663,17 +674,26 @@ class Unpacker(object):
if execute == EX_SKIP: if execute == EX_SKIP:
return return
if typ == TYPE_RAW: if typ == TYPE_RAW:
if self._encoding is not None: if self._raw:
obj = obj.decode(self._encoding, self._unicode_errors)
elif self._raw:
obj = bytes(obj) obj = bytes(obj)
else: else:
obj = obj.decode('utf_8') obj = obj.decode("utf_8", self._unicode_errors)
return obj return obj
if typ == TYPE_EXT:
return self._ext_hook(n, bytes(obj))
if typ == TYPE_BIN: if typ == TYPE_BIN:
return bytes(obj) return bytes(obj)
if typ == TYPE_EXT:
if n == -1: # timestamp
ts = Timestamp.from_bytes(bytes(obj))
if self._timestamp == 1:
return ts.to_unix()
elif self._timestamp == 2:
return ts.to_unix_nano()
elif self._timestamp == 3:
return ts.to_datetime()
else:
return ts
else:
return self._ext_hook(n, bytes(obj))
assert typ == TYPE_IMMEDIATE assert typ == TYPE_IMMEDIATE
return obj return obj
@ -723,7 +743,7 @@ class Packer(object):
""" """
MessagePack Packer MessagePack Packer
usage: Usage:
packer = Packer() packer = Packer()
astream.write(packer.pack(a)) astream.write(packer.pack(a))
@ -744,49 +764,58 @@ class Packer(object):
:param bool use_bin_type: :param bool use_bin_type:
Use bin type introduced in msgpack spec 2.0 for bytes. Use bin type introduced in msgpack spec 2.0 for bytes.
It also enables str8 type for unicode. It also enables str8 type for unicode. (default: True)
:param bool strict_types: :param bool strict_types:
If set to true, types will be checked to be exact. Derived classes If set to true, types will be checked to be exact. Derived classes
from serializeable types will not be serialized and will be from serializable types will not be serialized and will be
treated as unsupported type and forwarded to default. treated as unsupported type and forwarded to default.
Additionally tuples will not be serialized as lists. Additionally tuples will not be serialized as lists.
This is useful when trying to implement accurate serialization This is useful when trying to implement accurate serialization
for python types. for python types.
:param str encoding: :param bool datetime:
(deprecated) Convert unicode to bytes with this encoding. (default: 'utf-8') If set to true, datetime with tzinfo is packed into Timestamp type.
Note that the tzinfo is stripped in the timestamp.
You can get UTC datetime with `timestamp=3` option of the Unpacker.
(Python 2 is not supported).
:param str unicode_errors: :param str unicode_errors:
Error handler for encoding unicode. (default: 'strict') The error handler for encoding unicode. (default: 'strict')
DO NOT USE THIS!! This option is kept for very specific usage.
""" """
def __init__(self, default=None, encoding=None, unicode_errors=None,
use_single_float=False, autoreset=True, use_bin_type=False,
strict_types=False):
if encoding is None:
encoding = 'utf_8'
else:
warnings.warn(
"encoding is deprecated, Use raw=False instead.",
DeprecationWarning, stacklevel=2)
if unicode_errors is None:
unicode_errors = 'strict'
def __init__(
self,
default=None,
use_single_float=False,
autoreset=True,
use_bin_type=True,
strict_types=False,
datetime=False,
unicode_errors=None,
):
self._strict_types = strict_types self._strict_types = strict_types
self._use_float = use_single_float self._use_float = use_single_float
self._autoreset = autoreset self._autoreset = autoreset
self._use_bin_type = use_bin_type self._use_bin_type = use_bin_type
self._encoding = encoding
self._unicode_errors = unicode_errors
self._buffer = StringIO() self._buffer = StringIO()
if PY2 and datetime:
raise ValueError("datetime is not supported in Python 2")
self._datetime = bool(datetime)
self._unicode_errors = unicode_errors or "strict"
if default is not None: if default is not None:
if not callable(default): if not callable(default):
raise TypeError("default must be callable") raise TypeError("default must be callable")
self._default = default self._default = default
def _pack(self, obj, nest_limit=DEFAULT_RECURSE_LIMIT, def _pack(
check=isinstance, check_type_strict=_check_type_strict): self,
obj,
nest_limit=DEFAULT_RECURSE_LIMIT,
check=isinstance,
check_type_strict=_check_type_strict,
):
default_used = False default_used = False
if self._strict_types: if self._strict_types:
check = check_type_strict check = check_type_strict
@ -807,22 +836,22 @@ class Packer(object):
return self._buffer.write(struct.pack("B", obj)) return self._buffer.write(struct.pack("B", obj))
if -0x20 <= obj < 0: if -0x20 <= obj < 0:
return self._buffer.write(struct.pack("b", obj)) return self._buffer.write(struct.pack("b", obj))
if 0x80 <= obj <= 0xff: if 0x80 <= obj <= 0xFF:
return self._buffer.write(struct.pack("BB", 0xcc, obj)) return self._buffer.write(struct.pack("BB", 0xCC, obj))
if -0x80 <= obj < 0: if -0x80 <= obj < 0:
return self._buffer.write(struct.pack(">Bb", 0xd0, obj)) return self._buffer.write(struct.pack(">Bb", 0xD0, obj))
if 0xff < obj <= 0xffff: if 0xFF < obj <= 0xFFFF:
return self._buffer.write(struct.pack(">BH", 0xcd, obj)) return self._buffer.write(struct.pack(">BH", 0xCD, obj))
if -0x8000 <= obj < -0x80: if -0x8000 <= obj < -0x80:
return self._buffer.write(struct.pack(">Bh", 0xd1, obj)) return self._buffer.write(struct.pack(">Bh", 0xD1, obj))
if 0xffff < obj <= 0xffffffff: if 0xFFFF < obj <= 0xFFFFFFFF:
return self._buffer.write(struct.pack(">BI", 0xce, obj)) return self._buffer.write(struct.pack(">BI", 0xCE, obj))
if -0x80000000 <= obj < -0x8000: if -0x80000000 <= obj < -0x8000:
return self._buffer.write(struct.pack(">Bi", 0xd2, obj)) return self._buffer.write(struct.pack(">Bi", 0xD2, obj))
if 0xffffffff < obj <= 0xffffffffffffffff: if 0xFFFFFFFF < obj <= 0xFFFFFFFFFFFFFFFF:
return self._buffer.write(struct.pack(">BQ", 0xcf, obj)) return self._buffer.write(struct.pack(">BQ", 0xCF, obj))
if -0x8000000000000000 <= obj < -0x80000000: if -0x8000000000000000 <= obj < -0x80000000:
return self._buffer.write(struct.pack(">Bq", 0xd3, obj)) return self._buffer.write(struct.pack(">Bq", 0xD3, obj))
if not default_used and self._default is not None: if not default_used and self._default is not None:
obj = self._default(obj) obj = self._default(obj)
default_used = True default_used = True
@ -835,11 +864,7 @@ class Packer(object):
self._pack_bin_header(n) self._pack_bin_header(n)
return self._buffer.write(obj) return self._buffer.write(obj)
if check(obj, unicode): if check(obj, unicode):
if self._encoding is None: obj = obj.encode("utf-8", self._unicode_errors)
raise TypeError(
"Can't encode unicode string: "
"no encoding is specified")
obj = obj.encode(self._encoding, self._unicode_errors)
n = len(obj) n = len(obj)
if n >= 2 ** 32: if n >= 2 ** 32:
raise ValueError("String is too large") raise ValueError("String is too large")
@ -853,30 +878,34 @@ class Packer(object):
return self._buffer.write(obj) return self._buffer.write(obj)
if check(obj, float): if check(obj, float):
if self._use_float: if self._use_float:
return self._buffer.write(struct.pack(">Bf", 0xca, obj)) return self._buffer.write(struct.pack(">Bf", 0xCA, obj))
return self._buffer.write(struct.pack(">Bd", 0xcb, obj)) return self._buffer.write(struct.pack(">Bd", 0xCB, obj))
if check(obj, ExtType): if check(obj, (ExtType, Timestamp)):
if check(obj, Timestamp):
code = -1
data = obj.to_bytes()
else:
code = obj.code code = obj.code
data = obj.data data = obj.data
assert isinstance(code, int) assert isinstance(code, int)
assert isinstance(data, bytes) assert isinstance(data, bytes)
L = len(data) L = len(data)
if L == 1: if L == 1:
self._buffer.write(b'\xd4') self._buffer.write(b"\xd4")
elif L == 2: elif L == 2:
self._buffer.write(b'\xd5') self._buffer.write(b"\xd5")
elif L == 4: elif L == 4:
self._buffer.write(b'\xd6') self._buffer.write(b"\xd6")
elif L == 8: elif L == 8:
self._buffer.write(b'\xd7') self._buffer.write(b"\xd7")
elif L == 16: elif L == 16:
self._buffer.write(b'\xd8') self._buffer.write(b"\xd8")
elif L <= 0xff: elif L <= 0xFF:
self._buffer.write(struct.pack(">BB", 0xc7, L)) self._buffer.write(struct.pack(">BB", 0xC7, L))
elif L <= 0xffff: elif L <= 0xFFFF:
self._buffer.write(struct.pack(">BH", 0xc8, L)) self._buffer.write(struct.pack(">BH", 0xC8, L))
else: else:
self._buffer.write(struct.pack(">BI", 0xc9, L)) self._buffer.write(struct.pack(">BI", 0xC9, L))
self._buffer.write(struct.pack("b", code)) self._buffer.write(struct.pack("b", code))
self._buffer.write(data) self._buffer.write(data)
return return
@ -887,8 +916,15 @@ class Packer(object):
self._pack(obj[i], nest_limit - 1) self._pack(obj[i], nest_limit - 1)
return return
if check(obj, dict): if check(obj, dict):
return self._pack_map_pairs(len(obj), dict_iteritems(obj), return self._pack_map_pairs(
nest_limit - 1) len(obj), dict_iteritems(obj), nest_limit - 1
)
if self._datetime and check(obj, _DateTime):
obj = Timestamp.from_datetime(obj)
default_used = 1
continue
if not default_used and self._default is not None: if not default_used and self._default is not None:
obj = self._default(obj) obj = self._default(obj)
default_used = 1 default_used = 1
@ -939,43 +975,43 @@ class Packer(object):
if not isinstance(data, bytes): if not isinstance(data, bytes):
raise TypeError("data must have bytes type") raise TypeError("data must have bytes type")
L = len(data) L = len(data)
if L > 0xffffffff: if L > 0xFFFFFFFF:
raise ValueError("Too large data") raise ValueError("Too large data")
if L == 1: if L == 1:
self._buffer.write(b'\xd4') self._buffer.write(b"\xd4")
elif L == 2: elif L == 2:
self._buffer.write(b'\xd5') self._buffer.write(b"\xd5")
elif L == 4: elif L == 4:
self._buffer.write(b'\xd6') self._buffer.write(b"\xd6")
elif L == 8: elif L == 8:
self._buffer.write(b'\xd7') self._buffer.write(b"\xd7")
elif L == 16: elif L == 16:
self._buffer.write(b'\xd8') self._buffer.write(b"\xd8")
elif L <= 0xff: elif L <= 0xFF:
self._buffer.write(b'\xc7' + struct.pack('B', L)) self._buffer.write(b"\xc7" + struct.pack("B", L))
elif L <= 0xffff: elif L <= 0xFFFF:
self._buffer.write(b'\xc8' + struct.pack('>H', L)) self._buffer.write(b"\xc8" + struct.pack(">H", L))
else: else:
self._buffer.write(b'\xc9' + struct.pack('>I', L)) self._buffer.write(b"\xc9" + struct.pack(">I", L))
self._buffer.write(struct.pack('B', typecode)) self._buffer.write(struct.pack("B", typecode))
self._buffer.write(data) self._buffer.write(data)
def _pack_array_header(self, n): def _pack_array_header(self, n):
if n <= 0x0f: if n <= 0x0F:
return self._buffer.write(struct.pack('B', 0x90 + n)) return self._buffer.write(struct.pack("B", 0x90 + n))
if n <= 0xffff: if n <= 0xFFFF:
return self._buffer.write(struct.pack(">BH", 0xdc, n)) return self._buffer.write(struct.pack(">BH", 0xDC, n))
if n <= 0xffffffff: if n <= 0xFFFFFFFF:
return self._buffer.write(struct.pack(">BI", 0xdd, n)) return self._buffer.write(struct.pack(">BI", 0xDD, n))
raise ValueError("Array is too large") raise ValueError("Array is too large")
def _pack_map_header(self, n): def _pack_map_header(self, n):
if n <= 0x0f: if n <= 0x0F:
return self._buffer.write(struct.pack('B', 0x80 + n)) return self._buffer.write(struct.pack("B", 0x80 + n))
if n <= 0xffff: if n <= 0xFFFF:
return self._buffer.write(struct.pack(">BH", 0xde, n)) return self._buffer.write(struct.pack(">BH", 0xDE, n))
if n <= 0xffffffff: if n <= 0xFFFFFFFF:
return self._buffer.write(struct.pack(">BI", 0xdf, n)) return self._buffer.write(struct.pack(">BI", 0xDF, n))
raise ValueError("Dict is too large") raise ValueError("Dict is too large")
def _pack_map_pairs(self, n, pairs, nest_limit=DEFAULT_RECURSE_LIMIT): def _pack_map_pairs(self, n, pairs, nest_limit=DEFAULT_RECURSE_LIMIT):
@ -985,28 +1021,28 @@ class Packer(object):
self._pack(v, nest_limit - 1) self._pack(v, nest_limit - 1)
def _pack_raw_header(self, n): def _pack_raw_header(self, n):
if n <= 0x1f: if n <= 0x1F:
self._buffer.write(struct.pack('B', 0xa0 + n)) self._buffer.write(struct.pack("B", 0xA0 + n))
elif self._use_bin_type and n <= 0xff: elif self._use_bin_type and n <= 0xFF:
self._buffer.write(struct.pack('>BB', 0xd9, n)) self._buffer.write(struct.pack(">BB", 0xD9, n))
elif n <= 0xffff: elif n <= 0xFFFF:
self._buffer.write(struct.pack(">BH", 0xda, n)) self._buffer.write(struct.pack(">BH", 0xDA, n))
elif n <= 0xffffffff: elif n <= 0xFFFFFFFF:
self._buffer.write(struct.pack(">BI", 0xdb, n)) self._buffer.write(struct.pack(">BI", 0xDB, n))
else: else:
raise ValueError('Raw is too large') raise ValueError("Raw is too large")
def _pack_bin_header(self, n): def _pack_bin_header(self, n):
if not self._use_bin_type: if not self._use_bin_type:
return self._pack_raw_header(n) return self._pack_raw_header(n)
elif n <= 0xff: elif n <= 0xFF:
return self._buffer.write(struct.pack('>BB', 0xc4, n)) return self._buffer.write(struct.pack(">BB", 0xC4, n))
elif n <= 0xffff: elif n <= 0xFFFF:
return self._buffer.write(struct.pack(">BH", 0xc5, n)) return self._buffer.write(struct.pack(">BH", 0xC5, n))
elif n <= 0xffffffff: elif n <= 0xFFFFFFFF:
return self._buffer.write(struct.pack(">BI", 0xc6, n)) return self._buffer.write(struct.pack(">BI", 0xC6, n))
else: else:
raise ValueError('Bin is too large') raise ValueError("Bin is too large")
def bytes(self): def bytes(self):
"""Return internal buffer contents as bytes object""" """Return internal buffer contents as bytes object"""
@ -1015,7 +1051,7 @@ class Packer(object):
def reset(self): def reset(self):
"""Reset internal buffer. """Reset internal buffer.
This method is usaful only when autoreset=False. This method is useful only when autoreset=False.
""" """
self._buffer = StringIO() self._buffer = StringIO()

Loading…
Cancel
Save