|
|
@ -1,73 +1,85 @@ |
|
|
|
"""Fallback pure Python implementation of msgpack""" |
|
|
|
|
|
|
|
from datetime import datetime as _DateTime |
|
|
|
import sys |
|
|
|
import struct |
|
|
|
import warnings |
|
|
|
|
|
|
|
|
|
|
|
if sys.version_info[0] == 2: |
|
|
|
PY2 = True |
|
|
|
PY2 = sys.version_info[0] == 2 |
|
|
|
if PY2: |
|
|
|
int_types = (int, long) |
|
|
|
|
|
|
|
def dict_iteritems(d): |
|
|
|
return d.iteritems() |
|
|
|
|
|
|
|
|
|
|
|
else: |
|
|
|
PY2 = False |
|
|
|
int_types = int |
|
|
|
unicode = str |
|
|
|
xrange = range |
|
|
|
|
|
|
|
def dict_iteritems(d): |
|
|
|
return d.items() |
|
|
|
|
|
|
|
|
|
|
|
if sys.version_info < (3, 5): |
|
|
|
# Ugly hack... |
|
|
|
RecursionError = RuntimeError |
|
|
|
|
|
|
|
def _is_recursionerror(e): |
|
|
|
return len(e.args) == 1 and isinstance(e.args[0], str) and \ |
|
|
|
e.args[0].startswith('maximum recursion depth exceeded') |
|
|
|
return ( |
|
|
|
len(e.args) == 1 |
|
|
|
and isinstance(e.args[0], str) |
|
|
|
and e.args[0].startswith("maximum recursion depth exceeded") |
|
|
|
) |
|
|
|
|
|
|
|
|
|
|
|
else: |
|
|
|
|
|
|
|
def _is_recursionerror(e): |
|
|
|
return True |
|
|
|
|
|
|
|
if hasattr(sys, 'pypy_version_info'): |
|
|
|
# cStringIO is slow on PyPy, StringIO is faster. However: PyPy's own |
|
|
|
|
|
|
|
if hasattr(sys, "pypy_version_info"): |
|
|
|
# StringIO is slow on PyPy, StringIO is faster. However: PyPy's own |
|
|
|
# StringBuilder is fastest. |
|
|
|
from __pypy__ import newlist_hint |
|
|
|
|
|
|
|
try: |
|
|
|
from __pypy__.builders import BytesBuilder as StringBuilder |
|
|
|
except ImportError: |
|
|
|
from __pypy__.builders import StringBuilder |
|
|
|
USING_STRINGBUILDER = True |
|
|
|
|
|
|
|
class StringIO(object): |
|
|
|
def __init__(self, s=b''): |
|
|
|
def __init__(self, s=b""): |
|
|
|
if s: |
|
|
|
self.builder = StringBuilder(len(s)) |
|
|
|
self.builder.append(s) |
|
|
|
else: |
|
|
|
self.builder = StringBuilder() |
|
|
|
|
|
|
|
def write(self, s): |
|
|
|
if isinstance(s, memoryview): |
|
|
|
s = s.tobytes() |
|
|
|
elif isinstance(s, bytearray): |
|
|
|
s = bytes(s) |
|
|
|
self.builder.append(s) |
|
|
|
|
|
|
|
def getvalue(self): |
|
|
|
return self.builder.build() |
|
|
|
|
|
|
|
|
|
|
|
else: |
|
|
|
USING_STRINGBUILDER = False |
|
|
|
from io import BytesIO as StringIO |
|
|
|
|
|
|
|
newlist_hint = lambda size: [] |
|
|
|
|
|
|
|
|
|
|
|
from .exceptions import ( |
|
|
|
BufferFull, |
|
|
|
OutOfData, |
|
|
|
ExtraData, |
|
|
|
FormatError, |
|
|
|
StackError, |
|
|
|
) |
|
|
|
from .exceptions import BufferFull, OutOfData, ExtraData, FormatError, StackError |
|
|
|
|
|
|
|
from . import ExtType |
|
|
|
from .ext import ExtType, Timestamp |
|
|
|
|
|
|
|
|
|
|
|
EX_SKIP = 0 |
|
|
@ -93,31 +105,12 @@ def _check_type_strict(obj, t, type=type, tuple=tuple): |
|
|
|
|
|
|
|
|
|
|
|
def _get_data_from_buffer(obj): |
|
|
|
try: |
|
|
|
view = memoryview(obj) |
|
|
|
except TypeError: |
|
|
|
# try to use legacy buffer protocol if 2.7, otherwise re-raise |
|
|
|
if PY2: |
|
|
|
view = memoryview(buffer(obj)) |
|
|
|
warnings.warn("using old buffer interface to unpack %s; " |
|
|
|
"this leads to unpacking errors if slicing is used and " |
|
|
|
"will be removed in a future version" % type(obj), |
|
|
|
RuntimeWarning, stacklevel=3) |
|
|
|
else: |
|
|
|
raise |
|
|
|
if view.itemsize != 1: |
|
|
|
raise ValueError("cannot unpack from multi-byte object") |
|
|
|
return view |
|
|
|
|
|
|
|
|
|
|
|
def unpack(stream, **kwargs): |
|
|
|
warnings.warn( |
|
|
|
"Direct calling implementation's unpack() is deprecated, Use msgpack.unpack() or unpackb() instead.", |
|
|
|
DeprecationWarning, stacklevel=2) |
|
|
|
data = stream.read() |
|
|
|
return unpackb(data, **kwargs) |
|
|
|
|
|
|
|
|
|
|
|
def unpackb(packed, **kwargs): |
|
|
|
""" |
|
|
|
Unpack an object from `packed`. |
|
|
@ -146,9 +139,12 @@ def unpackb(packed, **kwargs): |
|
|
|
|
|
|
|
|
|
|
|
if sys.version_info < (2, 7, 6): |
|
|
|
|
|
|
|
def _unpack_from(f, b, o=0): |
|
|
|
"""Explicit typcast for legacy struct.unpack_from""" |
|
|
|
"""Explicit type cast for legacy struct.unpack_from""" |
|
|
|
return struct.unpack_from(f, bytes(b), o) |
|
|
|
|
|
|
|
|
|
|
|
else: |
|
|
|
_unpack_from = struct.unpack_from |
|
|
|
|
|
|
@ -156,7 +152,7 @@ else: |
|
|
|
class Unpacker(object): |
|
|
|
"""Streaming unpacker. |
|
|
|
|
|
|
|
arguments: |
|
|
|
Arguments: |
|
|
|
|
|
|
|
:param file_like: |
|
|
|
File-like object having `.read(n)` method. |
|
|
@ -170,19 +166,19 @@ class Unpacker(object): |
|
|
|
Otherwise, unpack to Python tuple. (default: True) |
|
|
|
|
|
|
|
:param bool raw: |
|
|
|
If true, unpack msgpack raw to Python bytes (default). |
|
|
|
Otherwise, unpack to Python str (or unicode on Python 2) by decoding |
|
|
|
with UTF-8 encoding (recommended). |
|
|
|
Currently, the default is true, but it will be changed to false in |
|
|
|
near future. So you must specify it explicitly for keeping backward |
|
|
|
compatibility. |
|
|
|
If true, unpack msgpack raw to Python bytes. |
|
|
|
Otherwise, unpack to Python str by decoding with UTF-8 encoding (default). |
|
|
|
|
|
|
|
:param int timestamp: |
|
|
|
Control how timestamp type is unpacked: |
|
|
|
|
|
|
|
*encoding* option which is deprecated overrides this option. |
|
|
|
0 - Timestamp |
|
|
|
1 - float (Seconds from the EPOCH) |
|
|
|
2 - int (Nanoseconds from the EPOCH) |
|
|
|
3 - datetime.datetime (UTC). Python 2 is not supported. |
|
|
|
|
|
|
|
:param bool strict_map_key: |
|
|
|
If true, only str or bytes are accepted for map (dict) keys. |
|
|
|
It's False by default for backward-compatibility. |
|
|
|
But it will be True from msgpack 1.0. |
|
|
|
If true (default), only str or bytes are accepted for map (dict) keys. |
|
|
|
|
|
|
|
:param callable object_hook: |
|
|
|
When specified, it should be callable. |
|
|
@ -194,48 +190,46 @@ class Unpacker(object): |
|
|
|
Unpacker calls it with a list of key-value pairs after unpacking msgpack map. |
|
|
|
(See also simplejson) |
|
|
|
|
|
|
|
:param str encoding: |
|
|
|
Encoding used for decoding msgpack raw. |
|
|
|
If it is None (default), msgpack raw is deserialized to Python bytes. |
|
|
|
|
|
|
|
:param str unicode_errors: |
|
|
|
(deprecated) Used for decoding msgpack raw with *encoding*. |
|
|
|
(default: `'strict'`) |
|
|
|
The error handler for decoding unicode. (default: 'strict') |
|
|
|
This option should be used only when you have msgpack data which |
|
|
|
contains invalid UTF-8 string. |
|
|
|
|
|
|
|
:param int max_buffer_size: |
|
|
|
Limits size of data waiting unpacked. 0 means system's INT_MAX (default). |
|
|
|
Limits size of data waiting unpacked. 0 means 2**32-1. |
|
|
|
The default value is 100*1024*1024 (100MiB). |
|
|
|
Raises `BufferFull` exception when it is insufficient. |
|
|
|
You should set this parameter when unpacking data from untrusted source. |
|
|
|
|
|
|
|
:param int max_str_len: |
|
|
|
Deprecated, use *max_buffer_size* instead. |
|
|
|
Limits max length of str. (default: max_buffer_size or 1024*1024) |
|
|
|
Limits max length of str. (default: max_buffer_size) |
|
|
|
|
|
|
|
:param int max_bin_len: |
|
|
|
Deprecated, use *max_buffer_size* instead. |
|
|
|
Limits max length of bin. (default: max_buffer_size or 1024*1024) |
|
|
|
Limits max length of bin. (default: max_buffer_size) |
|
|
|
|
|
|
|
:param int max_array_len: |
|
|
|
Limits max length of array. |
|
|
|
(default: max_buffer_size or 128*1024) |
|
|
|
(default: max_buffer_size) |
|
|
|
|
|
|
|
:param int max_map_len: |
|
|
|
Limits max length of map. |
|
|
|
(default: max_buffer_size//2 or 32*1024) |
|
|
|
(default: max_buffer_size//2) |
|
|
|
|
|
|
|
:param int max_ext_len: |
|
|
|
Deprecated, use *max_buffer_size* instead. |
|
|
|
Limits max size of ext type. (default: max_buffer_size or 1024*1024) |
|
|
|
Limits max size of ext type. (default: max_buffer_size) |
|
|
|
|
|
|
|
Example of streaming deserialize from file-like object:: |
|
|
|
|
|
|
|
unpacker = Unpacker(file_like, raw=False, max_buffer_size=10*1024*1024) |
|
|
|
unpacker = Unpacker(file_like) |
|
|
|
for o in unpacker: |
|
|
|
process(o) |
|
|
|
|
|
|
|
Example of streaming deserialize from socket:: |
|
|
|
|
|
|
|
unpacker = Unpacker(raw=False, max_buffer_size=10*1024*1024) |
|
|
|
unpacker = Unpacker(max_buffer_size) |
|
|
|
while True: |
|
|
|
buf = sock.recv(1024**2) |
|
|
|
if not buf: |
|
|
@ -251,22 +245,28 @@ class Unpacker(object): |
|
|
|
Other exceptions can be raised during unpacking. |
|
|
|
""" |
|
|
|
|
|
|
|
def __init__(self, file_like=None, read_size=0, use_list=True, raw=True, strict_map_key=False, |
|
|
|
object_hook=None, object_pairs_hook=None, list_hook=None, |
|
|
|
encoding=None, unicode_errors=None, max_buffer_size=0, |
|
|
|
def __init__( |
|
|
|
self, |
|
|
|
file_like=None, |
|
|
|
read_size=0, |
|
|
|
use_list=True, |
|
|
|
raw=False, |
|
|
|
timestamp=0, |
|
|
|
strict_map_key=True, |
|
|
|
object_hook=None, |
|
|
|
object_pairs_hook=None, |
|
|
|
list_hook=None, |
|
|
|
unicode_errors=None, |
|
|
|
max_buffer_size=100 * 1024 * 1024, |
|
|
|
ext_hook=ExtType, |
|
|
|
max_str_len=-1, |
|
|
|
max_bin_len=-1, |
|
|
|
max_array_len=-1, |
|
|
|
max_map_len=-1, |
|
|
|
max_ext_len=-1): |
|
|
|
if encoding is not None: |
|
|
|
warnings.warn( |
|
|
|
"encoding is deprecated, Use raw=False instead.", |
|
|
|
DeprecationWarning, stacklevel=2) |
|
|
|
|
|
|
|
max_ext_len=-1, |
|
|
|
): |
|
|
|
if unicode_errors is None: |
|
|
|
unicode_errors = 'strict' |
|
|
|
unicode_errors = "strict" |
|
|
|
|
|
|
|
if file_like is None: |
|
|
|
self._feeding = True |
|
|
@ -290,26 +290,30 @@ class Unpacker(object): |
|
|
|
# state, which _buf_checkpoint records. |
|
|
|
self._buf_checkpoint = 0 |
|
|
|
|
|
|
|
if not max_buffer_size: |
|
|
|
max_buffer_size = 2 ** 31 - 1 |
|
|
|
if max_str_len == -1: |
|
|
|
max_str_len = max_buffer_size or 1024*1024 |
|
|
|
max_str_len = max_buffer_size |
|
|
|
if max_bin_len == -1: |
|
|
|
max_bin_len = max_buffer_size or 1024*1024 |
|
|
|
max_bin_len = max_buffer_size |
|
|
|
if max_array_len == -1: |
|
|
|
max_array_len = max_buffer_size or 128*1024 |
|
|
|
max_array_len = max_buffer_size |
|
|
|
if max_map_len == -1: |
|
|
|
max_map_len = max_buffer_size//2 or 32*1024 |
|
|
|
max_map_len = max_buffer_size // 2 |
|
|
|
if max_ext_len == -1: |
|
|
|
max_ext_len = max_buffer_size or 1024*1024 |
|
|
|
max_ext_len = max_buffer_size |
|
|
|
|
|
|
|
self._max_buffer_size = max_buffer_size or 2**31-1 |
|
|
|
self._max_buffer_size = max_buffer_size |
|
|
|
if read_size > self._max_buffer_size: |
|
|
|
raise ValueError("read_size must be smaller than max_buffer_size") |
|
|
|
self._read_size = read_size or min(self._max_buffer_size, 16 * 1024) |
|
|
|
self._raw = bool(raw) |
|
|
|
self._strict_map_key = bool(strict_map_key) |
|
|
|
self._encoding = encoding |
|
|
|
self._unicode_errors = unicode_errors |
|
|
|
self._use_list = use_list |
|
|
|
if not (0 <= timestamp <= 3): |
|
|
|
raise ValueError("timestamp must be 0..3") |
|
|
|
self._timestamp = timestamp |
|
|
|
self._list_hook = list_hook |
|
|
|
self._object_hook = object_hook |
|
|
|
self._object_pairs_hook = object_pairs_hook |
|
|
@ -322,21 +326,22 @@ class Unpacker(object): |
|
|
|
self._stream_offset = 0 |
|
|
|
|
|
|
|
if list_hook is not None and not callable(list_hook): |
|
|
|
raise TypeError('`list_hook` is not callable') |
|
|
|
raise TypeError("`list_hook` is not callable") |
|
|
|
if object_hook is not None and not callable(object_hook): |
|
|
|
raise TypeError('`object_hook` is not callable') |
|
|
|
raise TypeError("`object_hook` is not callable") |
|
|
|
if object_pairs_hook is not None and not callable(object_pairs_hook): |
|
|
|
raise TypeError('`object_pairs_hook` is not callable') |
|
|
|
raise TypeError("`object_pairs_hook` is not callable") |
|
|
|
if object_hook is not None and object_pairs_hook is not None: |
|
|
|
raise TypeError("object_pairs_hook and object_hook are mutually " |
|
|
|
"exclusive") |
|
|
|
raise TypeError( |
|
|
|
"object_pairs_hook and object_hook are mutually " "exclusive" |
|
|
|
) |
|
|
|
if not callable(ext_hook): |
|
|
|
raise TypeError("`ext_hook` is not callable") |
|
|
|
|
|
|
|
def feed(self, next_bytes): |
|
|
|
assert self._feeding |
|
|
|
view = _get_data_from_buffer(next_bytes) |
|
|
|
if (len(self._buffer) - self._buff_i + len(view) > self._max_buffer_size): |
|
|
|
if len(self._buffer) - self._buff_i + len(view) > self._max_buffer_size: |
|
|
|
raise BufferFull |
|
|
|
|
|
|
|
# Strip buffer before checkpoint before reading file. |
|
|
@ -360,7 +365,9 @@ class Unpacker(object): |
|
|
|
return self._buffer[self._buff_i :] |
|
|
|
|
|
|
|
def read_bytes(self, n): |
|
|
|
return self._read(n) |
|
|
|
ret = self._read(n) |
|
|
|
self._consume() |
|
|
|
return ret |
|
|
|
|
|
|
|
def _read(self, n): |
|
|
|
# (int) -> bytearray |
|
|
@ -411,7 +418,7 @@ class Unpacker(object): |
|
|
|
if b & 0b10000000 == 0: |
|
|
|
obj = b |
|
|
|
elif b & 0b11100000 == 0b11100000: |
|
|
|
obj = -1 - (b ^ 0xff) |
|
|
|
obj = -1 - (b ^ 0xFF) |
|
|
|
elif b & 0b11100000 == 0b10100000: |
|
|
|
n = b & 0b00011111 |
|
|
|
typ = TYPE_RAW |
|
|
@ -428,13 +435,13 @@ class Unpacker(object): |
|
|
|
typ = TYPE_MAP |
|
|
|
if n > self._max_map_len: |
|
|
|
raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len) |
|
|
|
elif b == 0xc0: |
|
|
|
elif b == 0xC0: |
|
|
|
obj = None |
|
|
|
elif b == 0xc2: |
|
|
|
elif b == 0xC2: |
|
|
|
obj = False |
|
|
|
elif b == 0xc3: |
|
|
|
elif b == 0xC3: |
|
|
|
obj = True |
|
|
|
elif b == 0xc4: |
|
|
|
elif b == 0xC4: |
|
|
|
typ = TYPE_BIN |
|
|
|
self._reserve(1) |
|
|
|
n = self._buffer[self._buff_i] |
|
|
@ -442,7 +449,7 @@ class Unpacker(object): |
|
|
|
if n > self._max_bin_len: |
|
|
|
raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len)) |
|
|
|
obj = self._read(n) |
|
|
|
elif b == 0xc5: |
|
|
|
elif b == 0xC5: |
|
|
|
typ = TYPE_BIN |
|
|
|
self._reserve(2) |
|
|
|
n = _unpack_from(">H", self._buffer, self._buff_i)[0] |
|
|
@ -450,7 +457,7 @@ class Unpacker(object): |
|
|
|
if n > self._max_bin_len: |
|
|
|
raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len)) |
|
|
|
obj = self._read(n) |
|
|
|
elif b == 0xc6: |
|
|
|
elif b == 0xC6: |
|
|
|
typ = TYPE_BIN |
|
|
|
self._reserve(4) |
|
|
|
n = _unpack_from(">I", self._buffer, self._buff_i)[0] |
|
|
@ -458,106 +465,106 @@ class Unpacker(object): |
|
|
|
if n > self._max_bin_len: |
|
|
|
raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len)) |
|
|
|
obj = self._read(n) |
|
|
|
elif b == 0xc7: # ext 8 |
|
|
|
elif b == 0xC7: # ext 8 |
|
|
|
typ = TYPE_EXT |
|
|
|
self._reserve(2) |
|
|
|
L, n = _unpack_from('Bb', self._buffer, self._buff_i) |
|
|
|
L, n = _unpack_from("Bb", self._buffer, self._buff_i) |
|
|
|
self._buff_i += 2 |
|
|
|
if L > self._max_ext_len: |
|
|
|
raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len)) |
|
|
|
obj = self._read(L) |
|
|
|
elif b == 0xc8: # ext 16 |
|
|
|
elif b == 0xC8: # ext 16 |
|
|
|
typ = TYPE_EXT |
|
|
|
self._reserve(3) |
|
|
|
L, n = _unpack_from('>Hb', self._buffer, self._buff_i) |
|
|
|
L, n = _unpack_from(">Hb", self._buffer, self._buff_i) |
|
|
|
self._buff_i += 3 |
|
|
|
if L > self._max_ext_len: |
|
|
|
raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len)) |
|
|
|
obj = self._read(L) |
|
|
|
elif b == 0xc9: # ext 32 |
|
|
|
elif b == 0xC9: # ext 32 |
|
|
|
typ = TYPE_EXT |
|
|
|
self._reserve(5) |
|
|
|
L, n = _unpack_from('>Ib', self._buffer, self._buff_i) |
|
|
|
L, n = _unpack_from(">Ib", self._buffer, self._buff_i) |
|
|
|
self._buff_i += 5 |
|
|
|
if L > self._max_ext_len: |
|
|
|
raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len)) |
|
|
|
obj = self._read(L) |
|
|
|
elif b == 0xca: |
|
|
|
elif b == 0xCA: |
|
|
|
self._reserve(4) |
|
|
|
obj = _unpack_from(">f", self._buffer, self._buff_i)[0] |
|
|
|
self._buff_i += 4 |
|
|
|
elif b == 0xcb: |
|
|
|
elif b == 0xCB: |
|
|
|
self._reserve(8) |
|
|
|
obj = _unpack_from(">d", self._buffer, self._buff_i)[0] |
|
|
|
self._buff_i += 8 |
|
|
|
elif b == 0xcc: |
|
|
|
elif b == 0xCC: |
|
|
|
self._reserve(1) |
|
|
|
obj = self._buffer[self._buff_i] |
|
|
|
self._buff_i += 1 |
|
|
|
elif b == 0xcd: |
|
|
|
elif b == 0xCD: |
|
|
|
self._reserve(2) |
|
|
|
obj = _unpack_from(">H", self._buffer, self._buff_i)[0] |
|
|
|
self._buff_i += 2 |
|
|
|
elif b == 0xce: |
|
|
|
elif b == 0xCE: |
|
|
|
self._reserve(4) |
|
|
|
obj = _unpack_from(">I", self._buffer, self._buff_i)[0] |
|
|
|
self._buff_i += 4 |
|
|
|
elif b == 0xcf: |
|
|
|
elif b == 0xCF: |
|
|
|
self._reserve(8) |
|
|
|
obj = _unpack_from(">Q", self._buffer, self._buff_i)[0] |
|
|
|
self._buff_i += 8 |
|
|
|
elif b == 0xd0: |
|
|
|
elif b == 0xD0: |
|
|
|
self._reserve(1) |
|
|
|
obj = _unpack_from("b", self._buffer, self._buff_i)[0] |
|
|
|
self._buff_i += 1 |
|
|
|
elif b == 0xd1: |
|
|
|
elif b == 0xD1: |
|
|
|
self._reserve(2) |
|
|
|
obj = _unpack_from(">h", self._buffer, self._buff_i)[0] |
|
|
|
self._buff_i += 2 |
|
|
|
elif b == 0xd2: |
|
|
|
elif b == 0xD2: |
|
|
|
self._reserve(4) |
|
|
|
obj = _unpack_from(">i", self._buffer, self._buff_i)[0] |
|
|
|
self._buff_i += 4 |
|
|
|
elif b == 0xd3: |
|
|
|
elif b == 0xD3: |
|
|
|
self._reserve(8) |
|
|
|
obj = _unpack_from(">q", self._buffer, self._buff_i)[0] |
|
|
|
self._buff_i += 8 |
|
|
|
elif b == 0xd4: # fixext 1 |
|
|
|
elif b == 0xD4: # fixext 1 |
|
|
|
typ = TYPE_EXT |
|
|
|
if self._max_ext_len < 1: |
|
|
|
raise ValueError("%s exceeds max_ext_len(%s)" % (1, self._max_ext_len)) |
|
|
|
self._reserve(2) |
|
|
|
n, obj = _unpack_from("b1s", self._buffer, self._buff_i) |
|
|
|
self._buff_i += 2 |
|
|
|
elif b == 0xd5: # fixext 2 |
|
|
|
elif b == 0xD5: # fixext 2 |
|
|
|
typ = TYPE_EXT |
|
|
|
if self._max_ext_len < 2: |
|
|
|
raise ValueError("%s exceeds max_ext_len(%s)" % (2, self._max_ext_len)) |
|
|
|
self._reserve(3) |
|
|
|
n, obj = _unpack_from("b2s", self._buffer, self._buff_i) |
|
|
|
self._buff_i += 3 |
|
|
|
elif b == 0xd6: # fixext 4 |
|
|
|
elif b == 0xD6: # fixext 4 |
|
|
|
typ = TYPE_EXT |
|
|
|
if self._max_ext_len < 4: |
|
|
|
raise ValueError("%s exceeds max_ext_len(%s)" % (4, self._max_ext_len)) |
|
|
|
self._reserve(5) |
|
|
|
n, obj = _unpack_from("b4s", self._buffer, self._buff_i) |
|
|
|
self._buff_i += 5 |
|
|
|
elif b == 0xd7: # fixext 8 |
|
|
|
elif b == 0xD7: # fixext 8 |
|
|
|
typ = TYPE_EXT |
|
|
|
if self._max_ext_len < 8: |
|
|
|
raise ValueError("%s exceeds max_ext_len(%s)" % (8, self._max_ext_len)) |
|
|
|
self._reserve(9) |
|
|
|
n, obj = _unpack_from("b8s", self._buffer, self._buff_i) |
|
|
|
self._buff_i += 9 |
|
|
|
elif b == 0xd8: # fixext 16 |
|
|
|
elif b == 0xD8: # fixext 16 |
|
|
|
typ = TYPE_EXT |
|
|
|
if self._max_ext_len < 16: |
|
|
|
raise ValueError("%s exceeds max_ext_len(%s)" % (16, self._max_ext_len)) |
|
|
|
self._reserve(17) |
|
|
|
n, obj = _unpack_from("b16s", self._buffer, self._buff_i) |
|
|
|
self._buff_i += 17 |
|
|
|
elif b == 0xd9: |
|
|
|
elif b == 0xD9: |
|
|
|
typ = TYPE_RAW |
|
|
|
self._reserve(1) |
|
|
|
n = self._buffer[self._buff_i] |
|
|
@ -565,46 +572,46 @@ class Unpacker(object): |
|
|
|
if n > self._max_str_len: |
|
|
|
raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len) |
|
|
|
obj = self._read(n) |
|
|
|
elif b == 0xda: |
|
|
|
elif b == 0xDA: |
|
|
|
typ = TYPE_RAW |
|
|
|
self._reserve(2) |
|
|
|
n, = _unpack_from(">H", self._buffer, self._buff_i) |
|
|
|
(n,) = _unpack_from(">H", self._buffer, self._buff_i) |
|
|
|
self._buff_i += 2 |
|
|
|
if n > self._max_str_len: |
|
|
|
raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len) |
|
|
|
obj = self._read(n) |
|
|
|
elif b == 0xdb: |
|
|
|
elif b == 0xDB: |
|
|
|
typ = TYPE_RAW |
|
|
|
self._reserve(4) |
|
|
|
n, = _unpack_from(">I", self._buffer, self._buff_i) |
|
|
|
(n,) = _unpack_from(">I", self._buffer, self._buff_i) |
|
|
|
self._buff_i += 4 |
|
|
|
if n > self._max_str_len: |
|
|
|
raise ValueError("%s exceeds max_str_len(%s)", n, self._max_str_len) |
|
|
|
obj = self._read(n) |
|
|
|
elif b == 0xdc: |
|
|
|
elif b == 0xDC: |
|
|
|
typ = TYPE_ARRAY |
|
|
|
self._reserve(2) |
|
|
|
n, = _unpack_from(">H", self._buffer, self._buff_i) |
|
|
|
(n,) = _unpack_from(">H", self._buffer, self._buff_i) |
|
|
|
self._buff_i += 2 |
|
|
|
if n > self._max_array_len: |
|
|
|
raise ValueError("%s exceeds max_array_len(%s)", n, self._max_array_len) |
|
|
|
elif b == 0xdd: |
|
|
|
elif b == 0xDD: |
|
|
|
typ = TYPE_ARRAY |
|
|
|
self._reserve(4) |
|
|
|
n, = _unpack_from(">I", self._buffer, self._buff_i) |
|
|
|
(n,) = _unpack_from(">I", self._buffer, self._buff_i) |
|
|
|
self._buff_i += 4 |
|
|
|
if n > self._max_array_len: |
|
|
|
raise ValueError("%s exceeds max_array_len(%s)", n, self._max_array_len) |
|
|
|
elif b == 0xde: |
|
|
|
elif b == 0xDE: |
|
|
|
self._reserve(2) |
|
|
|
n, = _unpack_from(">H", self._buffer, self._buff_i) |
|
|
|
(n,) = _unpack_from(">H", self._buffer, self._buff_i) |
|
|
|
self._buff_i += 2 |
|
|
|
if n > self._max_map_len: |
|
|
|
raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len) |
|
|
|
typ = TYPE_MAP |
|
|
|
elif b == 0xdf: |
|
|
|
elif b == 0xDF: |
|
|
|
self._reserve(4) |
|
|
|
n, = _unpack_from(">I", self._buffer, self._buff_i) |
|
|
|
(n,) = _unpack_from(">I", self._buffer, self._buff_i) |
|
|
|
self._buff_i += 4 |
|
|
|
if n > self._max_map_len: |
|
|
|
raise ValueError("%s exceeds max_map_len(%s)", n, self._max_map_len) |
|
|
@ -647,15 +654,19 @@ class Unpacker(object): |
|
|
|
return |
|
|
|
if self._object_pairs_hook is not None: |
|
|
|
ret = self._object_pairs_hook( |
|
|
|
(self._unpack(EX_CONSTRUCT), |
|
|
|
self._unpack(EX_CONSTRUCT)) |
|
|
|
for _ in xrange(n)) |
|
|
|
(self._unpack(EX_CONSTRUCT), self._unpack(EX_CONSTRUCT)) |
|
|
|
for _ in xrange(n) |
|
|
|
) |
|
|
|
else: |
|
|
|
ret = {} |
|
|
|
for _ in xrange(n): |
|
|
|
key = self._unpack(EX_CONSTRUCT) |
|
|
|
if self._strict_map_key and type(key) not in (unicode, bytes): |
|
|
|
raise ValueError("%s is not allowed for map key" % str(type(key))) |
|
|
|
raise ValueError( |
|
|
|
"%s is not allowed for map key" % str(type(key)) |
|
|
|
) |
|
|
|
if not PY2 and type(key) is str: |
|
|
|
key = sys.intern(key) |
|
|
|
ret[key] = self._unpack(EX_CONSTRUCT) |
|
|
|
if self._object_hook is not None: |
|
|
|
ret = self._object_hook(ret) |
|
|
@ -663,17 +674,26 @@ class Unpacker(object): |
|
|
|
if execute == EX_SKIP: |
|
|
|
return |
|
|
|
if typ == TYPE_RAW: |
|
|
|
if self._encoding is not None: |
|
|
|
obj = obj.decode(self._encoding, self._unicode_errors) |
|
|
|
elif self._raw: |
|
|
|
if self._raw: |
|
|
|
obj = bytes(obj) |
|
|
|
else: |
|
|
|
obj = obj.decode('utf_8') |
|
|
|
obj = obj.decode("utf_8", self._unicode_errors) |
|
|
|
return obj |
|
|
|
if typ == TYPE_EXT: |
|
|
|
return self._ext_hook(n, bytes(obj)) |
|
|
|
if typ == TYPE_BIN: |
|
|
|
return bytes(obj) |
|
|
|
if typ == TYPE_EXT: |
|
|
|
if n == -1: # timestamp |
|
|
|
ts = Timestamp.from_bytes(bytes(obj)) |
|
|
|
if self._timestamp == 1: |
|
|
|
return ts.to_unix() |
|
|
|
elif self._timestamp == 2: |
|
|
|
return ts.to_unix_nano() |
|
|
|
elif self._timestamp == 3: |
|
|
|
return ts.to_datetime() |
|
|
|
else: |
|
|
|
return ts |
|
|
|
else: |
|
|
|
return self._ext_hook(n, bytes(obj)) |
|
|
|
assert typ == TYPE_IMMEDIATE |
|
|
|
return obj |
|
|
|
|
|
|
@ -723,7 +743,7 @@ class Packer(object): |
|
|
|
""" |
|
|
|
MessagePack Packer |
|
|
|
|
|
|
|
usage: |
|
|
|
Usage: |
|
|
|
|
|
|
|
packer = Packer() |
|
|
|
astream.write(packer.pack(a)) |
|
|
@ -744,49 +764,58 @@ class Packer(object): |
|
|
|
|
|
|
|
:param bool use_bin_type: |
|
|
|
Use bin type introduced in msgpack spec 2.0 for bytes. |
|
|
|
It also enables str8 type for unicode. |
|
|
|
It also enables str8 type for unicode. (default: True) |
|
|
|
|
|
|
|
:param bool strict_types: |
|
|
|
If set to true, types will be checked to be exact. Derived classes |
|
|
|
from serializeable types will not be serialized and will be |
|
|
|
from serializable types will not be serialized and will be |
|
|
|
treated as unsupported type and forwarded to default. |
|
|
|
Additionally tuples will not be serialized as lists. |
|
|
|
This is useful when trying to implement accurate serialization |
|
|
|
for python types. |
|
|
|
|
|
|
|
:param str encoding: |
|
|
|
(deprecated) Convert unicode to bytes with this encoding. (default: 'utf-8') |
|
|
|
:param bool datetime: |
|
|
|
If set to true, datetime with tzinfo is packed into Timestamp type. |
|
|
|
Note that the tzinfo is stripped in the timestamp. |
|
|
|
You can get UTC datetime with `timestamp=3` option of the Unpacker. |
|
|
|
(Python 2 is not supported). |
|
|
|
|
|
|
|
:param str unicode_errors: |
|
|
|
Error handler for encoding unicode. (default: 'strict') |
|
|
|
The error handler for encoding unicode. (default: 'strict') |
|
|
|
DO NOT USE THIS!! This option is kept for very specific usage. |
|
|
|
""" |
|
|
|
def __init__(self, default=None, encoding=None, unicode_errors=None, |
|
|
|
use_single_float=False, autoreset=True, use_bin_type=False, |
|
|
|
strict_types=False): |
|
|
|
if encoding is None: |
|
|
|
encoding = 'utf_8' |
|
|
|
else: |
|
|
|
warnings.warn( |
|
|
|
"encoding is deprecated, Use raw=False instead.", |
|
|
|
DeprecationWarning, stacklevel=2) |
|
|
|
|
|
|
|
if unicode_errors is None: |
|
|
|
unicode_errors = 'strict' |
|
|
|
|
|
|
|
def __init__( |
|
|
|
self, |
|
|
|
default=None, |
|
|
|
use_single_float=False, |
|
|
|
autoreset=True, |
|
|
|
use_bin_type=True, |
|
|
|
strict_types=False, |
|
|
|
datetime=False, |
|
|
|
unicode_errors=None, |
|
|
|
): |
|
|
|
self._strict_types = strict_types |
|
|
|
self._use_float = use_single_float |
|
|
|
self._autoreset = autoreset |
|
|
|
self._use_bin_type = use_bin_type |
|
|
|
self._encoding = encoding |
|
|
|
self._unicode_errors = unicode_errors |
|
|
|
self._buffer = StringIO() |
|
|
|
if PY2 and datetime: |
|
|
|
raise ValueError("datetime is not supported in Python 2") |
|
|
|
self._datetime = bool(datetime) |
|
|
|
self._unicode_errors = unicode_errors or "strict" |
|
|
|
if default is not None: |
|
|
|
if not callable(default): |
|
|
|
raise TypeError("default must be callable") |
|
|
|
self._default = default |
|
|
|
|
|
|
|
def _pack(self, obj, nest_limit=DEFAULT_RECURSE_LIMIT, |
|
|
|
check=isinstance, check_type_strict=_check_type_strict): |
|
|
|
def _pack( |
|
|
|
self, |
|
|
|
obj, |
|
|
|
nest_limit=DEFAULT_RECURSE_LIMIT, |
|
|
|
check=isinstance, |
|
|
|
check_type_strict=_check_type_strict, |
|
|
|
): |
|
|
|
default_used = False |
|
|
|
if self._strict_types: |
|
|
|
check = check_type_strict |
|
|
@ -807,22 +836,22 @@ class Packer(object): |
|
|
|
return self._buffer.write(struct.pack("B", obj)) |
|
|
|
if -0x20 <= obj < 0: |
|
|
|
return self._buffer.write(struct.pack("b", obj)) |
|
|
|
if 0x80 <= obj <= 0xff: |
|
|
|
return self._buffer.write(struct.pack("BB", 0xcc, obj)) |
|
|
|
if 0x80 <= obj <= 0xFF: |
|
|
|
return self._buffer.write(struct.pack("BB", 0xCC, obj)) |
|
|
|
if -0x80 <= obj < 0: |
|
|
|
return self._buffer.write(struct.pack(">Bb", 0xd0, obj)) |
|
|
|
if 0xff < obj <= 0xffff: |
|
|
|
return self._buffer.write(struct.pack(">BH", 0xcd, obj)) |
|
|
|
return self._buffer.write(struct.pack(">Bb", 0xD0, obj)) |
|
|
|
if 0xFF < obj <= 0xFFFF: |
|
|
|
return self._buffer.write(struct.pack(">BH", 0xCD, obj)) |
|
|
|
if -0x8000 <= obj < -0x80: |
|
|
|
return self._buffer.write(struct.pack(">Bh", 0xd1, obj)) |
|
|
|
if 0xffff < obj <= 0xffffffff: |
|
|
|
return self._buffer.write(struct.pack(">BI", 0xce, obj)) |
|
|
|
return self._buffer.write(struct.pack(">Bh", 0xD1, obj)) |
|
|
|
if 0xFFFF < obj <= 0xFFFFFFFF: |
|
|
|
return self._buffer.write(struct.pack(">BI", 0xCE, obj)) |
|
|
|
if -0x80000000 <= obj < -0x8000: |
|
|
|
return self._buffer.write(struct.pack(">Bi", 0xd2, obj)) |
|
|
|
if 0xffffffff < obj <= 0xffffffffffffffff: |
|
|
|
return self._buffer.write(struct.pack(">BQ", 0xcf, obj)) |
|
|
|
return self._buffer.write(struct.pack(">Bi", 0xD2, obj)) |
|
|
|
if 0xFFFFFFFF < obj <= 0xFFFFFFFFFFFFFFFF: |
|
|
|
return self._buffer.write(struct.pack(">BQ", 0xCF, obj)) |
|
|
|
if -0x8000000000000000 <= obj < -0x80000000: |
|
|
|
return self._buffer.write(struct.pack(">Bq", 0xd3, obj)) |
|
|
|
return self._buffer.write(struct.pack(">Bq", 0xD3, obj)) |
|
|
|
if not default_used and self._default is not None: |
|
|
|
obj = self._default(obj) |
|
|
|
default_used = True |
|
|
@ -835,11 +864,7 @@ class Packer(object): |
|
|
|
self._pack_bin_header(n) |
|
|
|
return self._buffer.write(obj) |
|
|
|
if check(obj, unicode): |
|
|
|
if self._encoding is None: |
|
|
|
raise TypeError( |
|
|
|
"Can't encode unicode string: " |
|
|
|
"no encoding is specified") |
|
|
|
obj = obj.encode(self._encoding, self._unicode_errors) |
|
|
|
obj = obj.encode("utf-8", self._unicode_errors) |
|
|
|
n = len(obj) |
|
|
|
if n >= 2 ** 32: |
|
|
|
raise ValueError("String is too large") |
|
|
@ -853,30 +878,34 @@ class Packer(object): |
|
|
|
return self._buffer.write(obj) |
|
|
|
if check(obj, float): |
|
|
|
if self._use_float: |
|
|
|
return self._buffer.write(struct.pack(">Bf", 0xca, obj)) |
|
|
|
return self._buffer.write(struct.pack(">Bd", 0xcb, obj)) |
|
|
|
if check(obj, ExtType): |
|
|
|
return self._buffer.write(struct.pack(">Bf", 0xCA, obj)) |
|
|
|
return self._buffer.write(struct.pack(">Bd", 0xCB, obj)) |
|
|
|
if check(obj, (ExtType, Timestamp)): |
|
|
|
if check(obj, Timestamp): |
|
|
|
code = -1 |
|
|
|
data = obj.to_bytes() |
|
|
|
else: |
|
|
|
code = obj.code |
|
|
|
data = obj.data |
|
|
|
assert isinstance(code, int) |
|
|
|
assert isinstance(data, bytes) |
|
|
|
L = len(data) |
|
|
|
if L == 1: |
|
|
|
self._buffer.write(b'\xd4') |
|
|
|
self._buffer.write(b"\xd4") |
|
|
|
elif L == 2: |
|
|
|
self._buffer.write(b'\xd5') |
|
|
|
self._buffer.write(b"\xd5") |
|
|
|
elif L == 4: |
|
|
|
self._buffer.write(b'\xd6') |
|
|
|
self._buffer.write(b"\xd6") |
|
|
|
elif L == 8: |
|
|
|
self._buffer.write(b'\xd7') |
|
|
|
self._buffer.write(b"\xd7") |
|
|
|
elif L == 16: |
|
|
|
self._buffer.write(b'\xd8') |
|
|
|
elif L <= 0xff: |
|
|
|
self._buffer.write(struct.pack(">BB", 0xc7, L)) |
|
|
|
elif L <= 0xffff: |
|
|
|
self._buffer.write(struct.pack(">BH", 0xc8, L)) |
|
|
|
self._buffer.write(b"\xd8") |
|
|
|
elif L <= 0xFF: |
|
|
|
self._buffer.write(struct.pack(">BB", 0xC7, L)) |
|
|
|
elif L <= 0xFFFF: |
|
|
|
self._buffer.write(struct.pack(">BH", 0xC8, L)) |
|
|
|
else: |
|
|
|
self._buffer.write(struct.pack(">BI", 0xc9, L)) |
|
|
|
self._buffer.write(struct.pack(">BI", 0xC9, L)) |
|
|
|
self._buffer.write(struct.pack("b", code)) |
|
|
|
self._buffer.write(data) |
|
|
|
return |
|
|
@ -887,8 +916,15 @@ class Packer(object): |
|
|
|
self._pack(obj[i], nest_limit - 1) |
|
|
|
return |
|
|
|
if check(obj, dict): |
|
|
|
return self._pack_map_pairs(len(obj), dict_iteritems(obj), |
|
|
|
nest_limit - 1) |
|
|
|
return self._pack_map_pairs( |
|
|
|
len(obj), dict_iteritems(obj), nest_limit - 1 |
|
|
|
) |
|
|
|
|
|
|
|
if self._datetime and check(obj, _DateTime): |
|
|
|
obj = Timestamp.from_datetime(obj) |
|
|
|
default_used = 1 |
|
|
|
continue |
|
|
|
|
|
|
|
if not default_used and self._default is not None: |
|
|
|
obj = self._default(obj) |
|
|
|
default_used = 1 |
|
|
@ -939,43 +975,43 @@ class Packer(object): |
|
|
|
if not isinstance(data, bytes): |
|
|
|
raise TypeError("data must have bytes type") |
|
|
|
L = len(data) |
|
|
|
if L > 0xffffffff: |
|
|
|
if L > 0xFFFFFFFF: |
|
|
|
raise ValueError("Too large data") |
|
|
|
if L == 1: |
|
|
|
self._buffer.write(b'\xd4') |
|
|
|
self._buffer.write(b"\xd4") |
|
|
|
elif L == 2: |
|
|
|
self._buffer.write(b'\xd5') |
|
|
|
self._buffer.write(b"\xd5") |
|
|
|
elif L == 4: |
|
|
|
self._buffer.write(b'\xd6') |
|
|
|
self._buffer.write(b"\xd6") |
|
|
|
elif L == 8: |
|
|
|
self._buffer.write(b'\xd7') |
|
|
|
self._buffer.write(b"\xd7") |
|
|
|
elif L == 16: |
|
|
|
self._buffer.write(b'\xd8') |
|
|
|
elif L <= 0xff: |
|
|
|
self._buffer.write(b'\xc7' + struct.pack('B', L)) |
|
|
|
elif L <= 0xffff: |
|
|
|
self._buffer.write(b'\xc8' + struct.pack('>H', L)) |
|
|
|
self._buffer.write(b"\xd8") |
|
|
|
elif L <= 0xFF: |
|
|
|
self._buffer.write(b"\xc7" + struct.pack("B", L)) |
|
|
|
elif L <= 0xFFFF: |
|
|
|
self._buffer.write(b"\xc8" + struct.pack(">H", L)) |
|
|
|
else: |
|
|
|
self._buffer.write(b'\xc9' + struct.pack('>I', L)) |
|
|
|
self._buffer.write(struct.pack('B', typecode)) |
|
|
|
self._buffer.write(b"\xc9" + struct.pack(">I", L)) |
|
|
|
self._buffer.write(struct.pack("B", typecode)) |
|
|
|
self._buffer.write(data) |
|
|
|
|
|
|
|
def _pack_array_header(self, n):
    """Write a msgpack array header for *n* elements to the internal buffer.

    Chooses the smallest encoding that fits:
    fixarray (0x90 + n) for n <= 15, array16 (0xDC) for n <= 0xFFFF,
    array32 (0xDD) for n <= 0xFFFFFFFF.

    :raises ValueError: if *n* exceeds the msgpack array32 limit.
    """
    # NOTE: diff residue removed — the old (lowercase-hex) and new
    # (uppercase-hex) variants of each branch were both present.
    if n <= 0x0F:
        return self._buffer.write(struct.pack("B", 0x90 + n))
    if n <= 0xFFFF:
        return self._buffer.write(struct.pack(">BH", 0xDC, n))
    if n <= 0xFFFFFFFF:
        return self._buffer.write(struct.pack(">BI", 0xDD, n))
    raise ValueError("Array is too large")
|
|
|
|
|
|
|
def _pack_map_header(self, n):
    """Write a msgpack map header for *n* key-value pairs to the buffer.

    Chooses the smallest encoding that fits:
    fixmap (0x80 + n) for n <= 15, map16 (0xDE) for n <= 0xFFFF,
    map32 (0xDF) for n <= 0xFFFFFFFF.

    :raises ValueError: if *n* exceeds the msgpack map32 limit.
    """
    # NOTE: diff residue removed — duplicated old/new branch variants
    # collapsed into the single corrected sequence.
    if n <= 0x0F:
        return self._buffer.write(struct.pack("B", 0x80 + n))
    if n <= 0xFFFF:
        return self._buffer.write(struct.pack(">BH", 0xDE, n))
    if n <= 0xFFFFFFFF:
        return self._buffer.write(struct.pack(">BI", 0xDF, n))
    raise ValueError("Dict is too large")
|
|
|
|
|
|
|
def _pack_map_pairs(self, n, pairs, nest_limit=DEFAULT_RECURSE_LIMIT): |
|
|
@ -985,28 +1021,28 @@ class Packer(object): |
|
|
|
self._pack(v, nest_limit - 1) |
|
|
|
|
|
|
|
def _pack_raw_header(self, n):
    """Write a msgpack str (raw) header for an *n*-byte payload.

    Chooses the smallest encoding that fits:
    fixstr (0xA0 + n) for n <= 31; str8 (0xD9) only when
    ``self._use_bin_type`` is set (str8 does not exist in the legacy
    format); str16 (0xDA) for n <= 0xFFFF; str32 (0xDB) for
    n <= 0xFFFFFFFF.

    :raises ValueError: if *n* exceeds the msgpack str32 limit.
    """
    # NOTE: diff residue removed — duplicated old/new branch variants
    # collapsed into the single corrected sequence.
    if n <= 0x1F:
        self._buffer.write(struct.pack("B", 0xA0 + n))
    elif self._use_bin_type and n <= 0xFF:
        self._buffer.write(struct.pack(">BB", 0xD9, n))
    elif n <= 0xFFFF:
        self._buffer.write(struct.pack(">BH", 0xDA, n))
    elif n <= 0xFFFFFFFF:
        self._buffer.write(struct.pack(">BI", 0xDB, n))
    else:
        raise ValueError("Raw is too large")
|
|
|
|
|
|
|
def _pack_bin_header(self, n):
    """Write a msgpack bin header for an *n*-byte payload.

    When ``self._use_bin_type`` is false (legacy format, which has no
    bin family), falls back to a raw/str header. Otherwise chooses the
    smallest bin encoding that fits: bin8 (0xC4) for n <= 0xFF,
    bin16 (0xC5) for n <= 0xFFFF, bin32 (0xC6) for n <= 0xFFFFFFFF.

    :raises ValueError: if *n* exceeds the msgpack bin32 limit.
    """
    # NOTE: diff residue removed — duplicated old/new branch variants
    # collapsed into the single corrected sequence.
    if not self._use_bin_type:
        return self._pack_raw_header(n)
    elif n <= 0xFF:
        return self._buffer.write(struct.pack(">BB", 0xC4, n))
    elif n <= 0xFFFF:
        return self._buffer.write(struct.pack(">BH", 0xC5, n))
    elif n <= 0xFFFFFFFF:
        return self._buffer.write(struct.pack(">BI", 0xC6, n))
    else:
        raise ValueError("Bin is too large")
|
|
|
|
|
|
|
def bytes(self): |
|
|
|
"""Return internal buffer contents as bytes object""" |
|
|
@ -1015,7 +1051,7 @@ class Packer(object): |
|
|
|
def reset(self):
    """Reset internal buffer.

    This method is useful only when autoreset=False.
    """
    # NOTE: diff residue removed — the docstring previously contained
    # both the old typo line ("usaful") and its corrected replacement.
    self._buffer = StringIO()
|
|
|
|
|
|
|