Mirror of https://github.com/SickGear/SickGear.git (synced 2024-11-22 04:45:05 +00:00)
Update Msgpack 1.0.5 (0516c2c) → 1.0.6 (e1d3d5d).
This commit is contained in: parent 6846c6cf8a, commit d25653b06b
5 changed files with 64 additions and 149 deletions

@@ -4,6 +4,7 @@
 * Update certifi 2023.05.07 to 2023.07.22
 * Update CacheControl 0.12.11 (c05ef9e) to 0.13.1 (783a338)
 * Update feedparser 6.0.10 (859ac57) to 6.0.10 (9865dec)
+* Update Msgpack 1.0.5 (0516c2c) to 1.0.6 (e1d3d5d)
 * Update package resource API 67.5.1 (f51eccd) to 68.1.2 (1ef36f2)
 * Update soupsieve 2.3.2.post1 (792d566) to 2.4.1 (2e66beb)
 * Update Tornado Web Server 6.3.2 (e3aa6c5) to 6.3.3 (e4d6984)

@@ -1,16 +1,14 @@
-# coding: utf-8
 from .exceptions import *
 from .ext import ExtType, Timestamp
 
 import os
-import sys
 
 
-version = (1, 0, 5)
-__version__ = "1.0.5"
+version = (1, 0, 6, "rc", 1)
+__version__ = "1.0.6rc1"
 
 
-if os.environ.get("MSGPACK_PUREPYTHON") or sys.version_info[0] == 2:
+if os.environ.get("MSGPACK_PUREPYTHON"):
     from .fallback import Packer, unpackb, Unpacker
 else:
     try:

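The import-time switch above is what selects between the C extension and the pure-Python fallback; the Python 2 branch is gone, so only the environment variable matters now. A minimal sketch of how downstream code might confirm which build is active, using only the public msgpack API:

import os

# Force the pure-Python implementation; the check in __init__.py now looks
# only at this variable, not at the Python major version.
os.environ["MSGPACK_PUREPYTHON"] = "1"

import msgpack

# version is a tuple such as (1, 0, 6); __version__ is the matching string.
print(msgpack.version, msgpack.__version__)

# Round-trip a small payload to confirm the fallback Packer/Unpacker work.
payload = msgpack.packb({"answer": 42})
print(msgpack.unpackb(payload))
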
@@ -1 +1 @@
-version = (1, 0, 4)
+version = (1, 0, 6)

@@ -1,23 +1,8 @@
-# coding: utf-8
 from collections import namedtuple
 import datetime
-import sys
 import struct
 
 
-PY2 = sys.version_info[0] == 2
-
-if PY2:
-    int_types = (int, long)
-    _utc = None
-else:
-    int_types = int
-    try:
-        _utc = datetime.timezone.utc
-    except AttributeError:
-        _utc = datetime.timezone(datetime.timedelta(0))
-
-
 class ExtType(namedtuple("ExtType", "code data")):
     """ExtType represents ext type in msgpack."""
 
@@ -28,14 +13,15 @@ class ExtType(namedtuple("ExtType", "code data")):
             raise TypeError("data must be bytes")
         if not 0 <= code <= 127:
             raise ValueError("code must be 0~127")
-        return super(ExtType, cls).__new__(cls, code, data)
+        return super().__new__(cls, code, data)
 
 
-class Timestamp(object):
+class Timestamp:
     """Timestamp represents the Timestamp extension type in msgpack.
 
-    When built with Cython, msgpack uses C methods to pack and unpack `Timestamp`. When using pure-Python
-    msgpack, :func:`to_bytes` and :func:`from_bytes` are used to pack and unpack `Timestamp`.
+    When built with Cython, msgpack uses C methods to pack and unpack `Timestamp`.
+    When using pure-Python msgpack, :func:`to_bytes` and :func:`from_bytes` are used to pack and
+    unpack `Timestamp`.
 
     This class is immutable: Do not override seconds and nanoseconds.
     """

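For reference, a short sketch of the ExtType validation kept in this hunk (type code 0..127, payload must be bytes); only public msgpack names are used:

import msgpack

# A valid application-defined extension: type code 42, opaque byte payload.
ext = msgpack.ExtType(42, b"\x01\x02\x03")
packed = msgpack.packb(ext)

# unpackb returns ExtType instances for extension codes it does not know.
assert msgpack.unpackb(packed) == ext

# Both constraints enforced in __new__ raise immediately:
try:
    msgpack.ExtType(200, b"")        # code outside 0..127
except ValueError as exc:
    print(exc)
try:
    msgpack.ExtType(1, "not bytes")  # data must be bytes
except TypeError as exc:
    print(exc)
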
@@ -53,31 +39,25 @@ class Timestamp(object):
             Number of nanoseconds to add to `seconds` to get fractional time.
             Maximum is 999_999_999. Default is 0.
 
-        Note: Negative times (before the UNIX epoch) are represented as negative seconds + positive ns.
+        Note: Negative times (before the UNIX epoch) are represented as neg. seconds + pos. ns.
         """
-        if not isinstance(seconds, int_types):
+        if not isinstance(seconds, int):
             raise TypeError("seconds must be an integer")
-        if not isinstance(nanoseconds, int_types):
+        if not isinstance(nanoseconds, int):
             raise TypeError("nanoseconds must be an integer")
         if not (0 <= nanoseconds < 10**9):
-            raise ValueError(
-                "nanoseconds must be a non-negative integer less than 999999999."
-            )
+            raise ValueError("nanoseconds must be a non-negative integer less than 999999999.")
         self.seconds = seconds
         self.nanoseconds = nanoseconds
 
     def __repr__(self):
         """String representation of Timestamp."""
-        return "Timestamp(seconds={0}, nanoseconds={1})".format(
-            self.seconds, self.nanoseconds
-        )
+        return f"Timestamp(seconds={self.seconds}, nanoseconds={self.nanoseconds})"
 
     def __eq__(self, other):
         """Check for equality with another Timestamp object"""
         if type(other) is self.__class__:
-            return (
-                self.seconds == other.seconds and self.nanoseconds == other.nanoseconds
-            )
+            return self.seconds == other.seconds and self.nanoseconds == other.nanoseconds
         return False
 
     def __ne__(self, other):

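A small usage sketch of the constructor checks, __repr__ and __eq__ rewritten above, assuming the class is reached as msgpack.Timestamp:

from msgpack import Timestamp

ts = Timestamp(seconds=1700000000, nanoseconds=500000000)
print(ts)                                      # Timestamp(seconds=..., nanoseconds=...)
print(ts == Timestamp(1700000000, 500000000))  # True: field-wise equality

# nanoseconds must stay within [0, 10**9), otherwise ValueError is raised.
try:
    Timestamp(0, 10**9)
except ValueError as exc:
    print(exc)
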
@@ -140,7 +120,7 @@ class Timestamp(object):
         """Create a Timestamp from posix timestamp in seconds.
 
         :param unix_float: Posix timestamp in seconds.
-        :type unix_float: int or float.
+        :type unix_float: int or float
         """
         seconds = int(unix_sec // 1)
         nanoseconds = int((unix_sec % 1) * 10**9)

@@ -174,20 +154,15 @@ class Timestamp(object):
     def to_datetime(self):
         """Get the timestamp as a UTC datetime.
 
-        Python 2 is not supported.
-
-        :rtype: datetime.
+        :rtype: `datetime.datetime`
         """
-        return datetime.datetime.fromtimestamp(0, _utc) + datetime.timedelta(
-            seconds=self.to_unix()
-        )
+        utc = datetime.timezone.utc
+        return datetime.datetime.fromtimestamp(0, utc) + datetime.timedelta(seconds=self.to_unix())
 
     @staticmethod
     def from_datetime(dt):
         """Create a Timestamp from datetime with tzinfo.
 
-        Python 2 is not supported.
-
         :rtype: Timestamp
         """
         return Timestamp.from_unix(dt.timestamp())

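With the Python 2 branch gone, to_datetime() always builds an aware UTC datetime and from_datetime() simply wraps dt.timestamp(). A minimal round-trip sketch:

from msgpack import Timestamp

ts = Timestamp.from_unix(1700000000.25)
print(ts.to_unix())        # 1700000000.25 (float seconds)

dt = ts.to_datetime()
print(dt.tzinfo)           # timezone.utc: always an aware UTC datetime now

# from_datetime() expects a tz-aware datetime and reverses the conversion.
print(Timestamp.from_datetime(dt) == ts)   # True
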
@@ -4,39 +4,6 @@ import sys
 import struct
 
 
-PY2 = sys.version_info[0] == 2
-if PY2:
-    int_types = (int, long)
-
-    def dict_iteritems(d):
-        return d.iteritems()
-
-else:
-    int_types = int
-    unicode = str
-    xrange = range
-
-    def dict_iteritems(d):
-        return d.items()
-
-
-if sys.version_info < (3, 5):
-    # Ugly hack...
-    RecursionError = RuntimeError
-
-    def _is_recursionerror(e):
-        return (
-            len(e.args) == 1
-            and isinstance(e.args[0], str)
-            and e.args[0].startswith("maximum recursion depth exceeded")
-        )
-
-else:
-
-    def _is_recursionerror(e):
-        return True
-
-
 if hasattr(sys, "pypy_version_info"):
     # StringIO is slow on PyPy, StringIO is faster. However: PyPy's own
     # StringBuilder is fastest.

@@ -48,7 +15,7 @@ if hasattr(sys, "pypy_version_info"):
         from __pypy__.builders import StringBuilder
     USING_STRINGBUILDER = True
 
-    class StringIO(object):
+    class StringIO:
         def __init__(self, s=b""):
             if s:
                 self.builder = StringBuilder(len(s))

@@ -125,24 +92,13 @@ def unpackb(packed, **kwargs):
         ret = unpacker._unpack()
     except OutOfData:
         raise ValueError("Unpack failed: incomplete input")
-    except RecursionError as e:
-        if _is_recursionerror(e):
-            raise StackError
-        raise
+    except RecursionError:
+        raise StackError
     if unpacker._got_extradata():
         raise ExtraData(ret, unpacker._get_extradata())
     return ret
 
 
-if sys.version_info < (2, 7, 6):
-
-    def _unpack_from(f, b, o=0):
-        """Explicit type cast for legacy struct.unpack_from"""
-        return struct.unpack_from(f, bytes(b), o)
-
-else:
-    _unpack_from = struct.unpack_from
-
 _NO_FORMAT_USED = ""
 _MSGPACK_HEADERS = {
     0xC4: (1, _NO_FORMAT_USED, TYPE_BIN),

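The simplified handler above maps any RecursionError straight to StackError, while the surrounding checks still distinguish truncated input from trailing bytes. A sketch of those two public failure modes:

import msgpack
from msgpack.exceptions import ExtraData

good = msgpack.packb([1, 2, 3])

# Truncated input: "Unpack failed: incomplete input"
try:
    msgpack.unpackb(good[:-1])
except ValueError as exc:
    print(exc)

# Trailing bytes after a complete object: ExtraData carries both parts.
try:
    msgpack.unpackb(good + b"\x00")
except ExtraData as exc:
    print(exc.unpacked, exc.extra)
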
@@ -176,14 +132,14 @@ _MSGPACK_HEADERS = {
 }
 
 
-class Unpacker(object):
+class Unpacker:
     """Streaming unpacker.
 
     Arguments:
 
     :param file_like:
         File-like object having `.read(n)` method.
-        If specified, unpacker reads serialized data from it and :meth:`feed()` is not usable.
+        If specified, unpacker reads serialized data from it and `.feed()` is not usable.
 
     :param int read_size:
         Used as `file_like.read(read_size)`. (default: `min(16*1024, max_buffer_size)`)

@@ -202,17 +158,17 @@ class Unpacker(object):
             0 - Timestamp
             1 - float (Seconds from the EPOCH)
             2 - int (Nanoseconds from the EPOCH)
-            3 - datetime.datetime (UTC). Python 2 is not supported.
+            3 - datetime.datetime (UTC).
 
     :param bool strict_map_key:
         If true (default), only str or bytes are accepted for map (dict) keys.
 
-    :param callable object_hook:
+    :param object_hook:
         When specified, it should be callable.
         Unpacker calls it with a dict argument after unpacking msgpack map.
         (See also simplejson)
 
-    :param callable object_pairs_hook:
+    :param object_pairs_hook:
         When specified, it should be callable.
         Unpacker calls it with a list of key-value pairs after unpacking msgpack map.
         (See also simplejson)

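The parameters documented above are easiest to see in streaming use. A minimal sketch that reads from a file-like object with an object_hook (the record contents here are made up for illustration):

import io
import msgpack

# A few objects packed back to back, as a producer might write to a socket.
buf = io.BytesIO()
for record in ({"id": 1}, {"id": 2}, {"id": 3}):
    buf.write(msgpack.packb(record))
buf.seek(0)

# file_like mode: the Unpacker reads from buf itself, so feed() is not used.
unpacker = msgpack.Unpacker(buf, object_hook=lambda d: {**d, "seen": True})
for obj in unpacker:
    print(obj)
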
@@ -359,9 +315,7 @@ class Unpacker(object):
         if object_pairs_hook is not None and not callable(object_pairs_hook):
             raise TypeError("`object_pairs_hook` is not callable")
         if object_hook is not None and object_pairs_hook is not None:
-            raise TypeError(
-                "object_pairs_hook and object_hook are mutually " "exclusive"
-            )
+            raise TypeError("object_pairs_hook and object_hook are mutually exclusive")
         if not callable(ext_hook):
             raise TypeError("`ext_hook` is not callable")
 

@@ -453,20 +407,18 @@ class Unpacker(object):
             n = b & 0b00011111
             typ = TYPE_RAW
             if n > self._max_str_len:
-                raise ValueError("%s exceeds max_str_len(%s)" % (n, self._max_str_len))
+                raise ValueError(f"{n} exceeds max_str_len({self._max_str_len})")
             obj = self._read(n)
         elif b & 0b11110000 == 0b10010000:
             n = b & 0b00001111
             typ = TYPE_ARRAY
             if n > self._max_array_len:
-                raise ValueError(
-                    "%s exceeds max_array_len(%s)" % (n, self._max_array_len)
-                )
+                raise ValueError(f"{n} exceeds max_array_len({self._max_array_len})")
         elif b & 0b11110000 == 0b10000000:
             n = b & 0b00001111
             typ = TYPE_MAP
             if n > self._max_map_len:
-                raise ValueError("%s exceeds max_map_len(%s)" % (n, self._max_map_len))
+                raise ValueError(f"{n} exceeds max_map_len({self._max_map_len})")
         elif b == 0xC0:
             obj = None
         elif b == 0xC2:

@@ -477,65 +429,61 @@ class Unpacker(object):
             size, fmt, typ = _MSGPACK_HEADERS[b]
             self._reserve(size)
             if len(fmt) > 0:
-                n = _unpack_from(fmt, self._buffer, self._buff_i)[0]
+                n = struct.unpack_from(fmt, self._buffer, self._buff_i)[0]
             else:
                 n = self._buffer[self._buff_i]
             self._buff_i += size
             if n > self._max_bin_len:
-                raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len))
+                raise ValueError(f"{n} exceeds max_bin_len({self._max_bin_len})")
             obj = self._read(n)
         elif 0xC7 <= b <= 0xC9:
             size, fmt, typ = _MSGPACK_HEADERS[b]
             self._reserve(size)
-            L, n = _unpack_from(fmt, self._buffer, self._buff_i)
+            L, n = struct.unpack_from(fmt, self._buffer, self._buff_i)
             self._buff_i += size
             if L > self._max_ext_len:
-                raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len))
+                raise ValueError(f"{L} exceeds max_ext_len({self._max_ext_len})")
             obj = self._read(L)
         elif 0xCA <= b <= 0xD3:
             size, fmt = _MSGPACK_HEADERS[b]
             self._reserve(size)
             if len(fmt) > 0:
-                obj = _unpack_from(fmt, self._buffer, self._buff_i)[0]
+                obj = struct.unpack_from(fmt, self._buffer, self._buff_i)[0]
             else:
                 obj = self._buffer[self._buff_i]
             self._buff_i += size
         elif 0xD4 <= b <= 0xD8:
             size, fmt, typ = _MSGPACK_HEADERS[b]
             if self._max_ext_len < size:
-                raise ValueError(
-                    "%s exceeds max_ext_len(%s)" % (size, self._max_ext_len)
-                )
+                raise ValueError(f"{size} exceeds max_ext_len({self._max_ext_len})")
             self._reserve(size + 1)
-            n, obj = _unpack_from(fmt, self._buffer, self._buff_i)
+            n, obj = struct.unpack_from(fmt, self._buffer, self._buff_i)
             self._buff_i += size + 1
         elif 0xD9 <= b <= 0xDB:
             size, fmt, typ = _MSGPACK_HEADERS[b]
             self._reserve(size)
             if len(fmt) > 0:
-                (n,) = _unpack_from(fmt, self._buffer, self._buff_i)
+                (n,) = struct.unpack_from(fmt, self._buffer, self._buff_i)
             else:
                 n = self._buffer[self._buff_i]
             self._buff_i += size
             if n > self._max_str_len:
-                raise ValueError("%s exceeds max_str_len(%s)" % (n, self._max_str_len))
+                raise ValueError(f"{n} exceeds max_str_len({self._max_str_len})")
             obj = self._read(n)
         elif 0xDC <= b <= 0xDD:
             size, fmt, typ = _MSGPACK_HEADERS[b]
             self._reserve(size)
-            (n,) = _unpack_from(fmt, self._buffer, self._buff_i)
+            (n,) = struct.unpack_from(fmt, self._buffer, self._buff_i)
             self._buff_i += size
             if n > self._max_array_len:
-                raise ValueError(
-                    "%s exceeds max_array_len(%s)" % (n, self._max_array_len)
-                )
+                raise ValueError(f"{n} exceeds max_array_len({self._max_array_len})")
         elif 0xDE <= b <= 0xDF:
             size, fmt, typ = _MSGPACK_HEADERS[b]
             self._reserve(size)
-            (n,) = _unpack_from(fmt, self._buffer, self._buff_i)
+            (n,) = struct.unpack_from(fmt, self._buffer, self._buff_i)
             self._buff_i += size
             if n > self._max_map_len:
-                raise ValueError("%s exceeds max_map_len(%s)" % (n, self._max_map_len))
+                raise ValueError(f"{n} exceeds max_map_len({self._max_map_len})")
         else:
             raise FormatError("Unknown header: 0x%x" % b)
         return typ, n, obj

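The max_*_len checks reworked above (now f-strings) surface through the corresponding keyword arguments on Unpacker and unpackb. A small sketch, assuming default settings otherwise:

import msgpack

packed = msgpack.packb("abcdef")

# A string longer than max_str_len is rejected while its header is parsed.
try:
    msgpack.unpackb(packed, max_str_len=3)
except ValueError as exc:
    print(exc)   # e.g. "6 exceeds max_str_len(3)"
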
@@ -554,12 +502,12 @@ class Unpacker(object):
         # TODO should we eliminate the recursion?
         if typ == TYPE_ARRAY:
             if execute == EX_SKIP:
-                for i in xrange(n):
+                for i in range(n):
                     # TODO check whether we need to call `list_hook`
                     self._unpack(EX_SKIP)
                 return
             ret = newlist_hint(n)
-            for i in xrange(n):
+            for i in range(n):
                 ret.append(self._unpack(EX_CONSTRUCT))
             if self._list_hook is not None:
                 ret = self._list_hook(ret)

|
||||||
return ret if self._use_list else tuple(ret)
|
return ret if self._use_list else tuple(ret)
|
||||||
if typ == TYPE_MAP:
|
if typ == TYPE_MAP:
|
||||||
if execute == EX_SKIP:
|
if execute == EX_SKIP:
|
||||||
for i in xrange(n):
|
for i in range(n):
|
||||||
# TODO check whether we need to call hooks
|
# TODO check whether we need to call hooks
|
||||||
self._unpack(EX_SKIP)
|
self._unpack(EX_SKIP)
|
||||||
self._unpack(EX_SKIP)
|
self._unpack(EX_SKIP)
|
||||||
return
|
return
|
||||||
if self._object_pairs_hook is not None:
|
if self._object_pairs_hook is not None:
|
||||||
ret = self._object_pairs_hook(
|
ret = self._object_pairs_hook(
|
||||||
(self._unpack(EX_CONSTRUCT), self._unpack(EX_CONSTRUCT))
|
(self._unpack(EX_CONSTRUCT), self._unpack(EX_CONSTRUCT)) for _ in range(n)
|
||||||
for _ in xrange(n)
|
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
ret = {}
|
ret = {}
|
||||||
for _ in xrange(n):
|
for _ in range(n):
|
||||||
key = self._unpack(EX_CONSTRUCT)
|
key = self._unpack(EX_CONSTRUCT)
|
||||||
if self._strict_map_key and type(key) not in (unicode, bytes):
|
if self._strict_map_key and type(key) not in (str, bytes):
|
||||||
raise ValueError(
|
raise ValueError("%s is not allowed for map key" % str(type(key)))
|
||||||
"%s is not allowed for map key" % str(type(key))
|
if isinstance(key, str):
|
||||||
)
|
|
||||||
if not PY2 and type(key) is str:
|
|
||||||
key = sys.intern(key)
|
key = sys.intern(key)
|
||||||
ret[key] = self._unpack(EX_CONSTRUCT)
|
ret[key] = self._unpack(EX_CONSTRUCT)
|
||||||
if self._object_hook is not None:
|
if self._object_hook is not None:
|
||||||
|
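strict_map_key, enforced by the type check above, now accepts exactly str and bytes keys. A short sketch of both behaviours:

import msgpack

packed = msgpack.packb({1: "one"})

# Integer keys are refused while strict_map_key (the default) is on.
try:
    msgpack.unpackb(packed)
except ValueError as exc:
    print(exc)

print(msgpack.unpackb(packed, strict_map_key=False))   # {1: 'one'}
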
@@ -659,7 +604,7 @@ class Unpacker(object):
         return self._stream_offset
 
 
-class Packer(object):
+class Packer:
     """
     MessagePack Packer
 

@@ -671,7 +616,8 @@ class Packer(object):
 
     Packer's constructor has some keyword arguments:
 
-    :param callable default:
+    :param default:
+        When specified, it should be callable.
         Convert user type to builtin type that Packer supports.
         See also simplejson's document.
 

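The default callable documented above is the escape hatch for types the Packer cannot serialize natively. A minimal sketch converting a set (not a native msgpack type); the helper name here is made up:

import msgpack

def encode_default(obj):
    # Called only for objects the Packer cannot serialize on its own.
    if isinstance(obj, set):
        return sorted(obj)
    raise TypeError(f"Cannot serialize {obj!r}")

packed = msgpack.packb({"tags": {"b", "a"}}, default=encode_default)
print(msgpack.unpackb(packed))   # {'tags': ['a', 'b']}
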
@@ -698,7 +644,6 @@ class Packer(object):
         If set to true, datetime with tzinfo is packed into Timestamp type.
         Note that the tzinfo is stripped in the timestamp.
         You can get UTC datetime with `timestamp=3` option of the Unpacker.
-        (Python 2 is not supported).
 
     :param str unicode_errors:
         The error handler for encoding unicode. (default: 'strict')

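With the Python 2 caveat removed, datetime=True on the Packer plus timestamp=3 on the Unpacker gives a tz-aware round trip. A sketch under those options:

import datetime
import msgpack

now = datetime.datetime.now(datetime.timezone.utc)

# Packer side: aware datetimes become the msgpack Timestamp extension.
packed = msgpack.packb(now, datetime=True)

# Unpacker side: timestamp=3 converts them back into UTC datetimes.
print(msgpack.unpackb(packed, timestamp=3))
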
@@ -743,8 +688,6 @@ class Packer(object):
         self._autoreset = autoreset
         self._use_bin_type = use_bin_type
         self._buffer = StringIO()
-        if PY2 and datetime:
-            raise ValueError("datetime is not supported in Python 2")
         self._datetime = bool(datetime)
         self._unicode_errors = unicode_errors or "strict"
         if default is not None:

@@ -774,7 +717,7 @@ class Packer(object):
                 if obj:
                     return self._buffer.write(b"\xc3")
                 return self._buffer.write(b"\xc2")
-            if check(obj, int_types):
+            if check(obj, int):
                 if 0 <= obj < 0x80:
                     return self._buffer.write(struct.pack("B", obj))
                 if -0x20 <= obj < 0:

@@ -806,7 +749,7 @@ class Packer(object):
                     raise ValueError("%s is too large" % type(obj).__name__)
                 self._pack_bin_header(n)
                 return self._buffer.write(obj)
-            if check(obj, unicode):
+            if check(obj, str):
                 obj = obj.encode("utf-8", self._unicode_errors)
                 n = len(obj)
                 if n >= 2**32:

@@ -855,13 +798,11 @@ class Packer(object):
             if check(obj, list_types):
                 n = len(obj)
                 self._pack_array_header(n)
-                for i in xrange(n):
+                for i in range(n):
                     self._pack(obj[i], nest_limit - 1)
                 return
             if check(obj, dict):
-                return self._pack_map_pairs(
-                    len(obj), dict_iteritems(obj), nest_limit - 1
-                )
+                return self._pack_map_pairs(len(obj), obj.items(), nest_limit - 1)
 
             if self._datetime and check(obj, _DateTime) and obj.tzinfo is not None:
                 obj = Timestamp.from_datetime(obj)

@@ -874,9 +815,9 @@ class Packer(object):
                 continue
 
             if self._datetime and check(obj, _DateTime):
-                raise ValueError("Cannot serialize %r where tzinfo=None" % (obj,))
+                raise ValueError(f"Cannot serialize {obj!r} where tzinfo=None")
 
-            raise TypeError("Cannot serialize %r" % (obj,))
+            raise TypeError(f"Cannot serialize {obj!r}")
 
     def pack(self, obj):
         try:

@@ -963,7 +904,7 @@ class Packer(object):
 
     def _pack_map_pairs(self, n, pairs, nest_limit=DEFAULT_RECURSE_LIMIT):
         self._pack_map_header(n)
-        for (k, v) in pairs:
+        for k, v in pairs:
             self._pack(k, nest_limit - 1)
             self._pack(v, nest_limit - 1)
 

@@ -1004,7 +945,7 @@ class Packer(object):
 
     def getbuffer(self):
         """Return view of internal buffer."""
-        if USING_STRINGBUILDER or PY2:
+        if USING_STRINGBUILDER:
             return memoryview(self.bytes())
         else:
             return self._buffer.getbuffer()

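getbuffer() above returns a memoryview of the internal buffer (falling back to a copy when PyPy's StringBuilder is in use), which matters mainly with autoreset=False, where output accumulates across pack() calls. A short sketch:

import msgpack

packer = msgpack.Packer(autoreset=False)
packer.pack({"a": 1})
packer.pack({"b": 2})

# The buffer keeps growing until reset(); getbuffer() exposes it without a
# copy on CPython, while bytes() always returns a copied bytes object.
view = packer.getbuffer()
print(len(view), packer.bytes() == bytes(view))
packer.reset()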