Update SimpleJSON 3.16.1 (ce75e60) → 3.18.1 (c891b95).

JackDandy 2023-01-12 21:09:32 +00:00
parent 8576768b64
commit b3a2991f29
5 changed files with 130 additions and 104 deletions

View file

@ -4,6 +4,7 @@
* Add Filelock 3.9.0 (ce3e891)
* Remove Lockfile no longer used by Cachecontrol
* Update Msgpack 1.0.0 (fa7d744) to 1.0.4 (b5acfd5)
* Update SimpleJSON 3.16.1 (ce75e60) to 3.18.1 (c891b95)
* Update tmdbsimple 2.6.6 (679e343) to 2.9.1 (9da400a)
* Update torrent_parser 0.3.0 (2a4eecb) to 0.4.0 (23b9e11)
* Update unidecode module 1.1.1 (632af82) to 1.3.6 (4141992)

View file

@ -118,7 +118,7 @@ Serializing multiple objects to JSON lines (newline-delimited JSON)::
"""
from __future__ import absolute_import
__version__ = '3.16.1'
__version__ = '3.18.1'
__all__ = [
'dump', 'dumps', 'load', 'loads',
'JSONDecoder', 'JSONDecodeError', 'JSONEncoder',
@ -180,18 +180,12 @@ def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
``.write()``-supporting file-like object).
If *skipkeys* is true then ``dict`` keys that are not basic types
(``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
(``str``, ``int``, ``long``, ``float``, ``bool``, ``None``)
will be skipped instead of raising a ``TypeError``.
If *ensure_ascii* is false, then the some chunks written to ``fp``
may be ``unicode`` instances, subject to normal Python ``str`` to
``unicode`` coercion rules. Unless ``fp.write()`` explicitly
understands ``unicode`` (as in ``codecs.getwriter()``) this is likely
to cause an error.
If *check_circular* is false, then the circular reference check
for container types will be skipped and a circular reference will
result in an ``OverflowError`` (or worse).
If *ensure_ascii* is false (default: ``True``), then the output may
contain non-ASCII characters, so long as they do not need to be escaped
by JSON. When it is true, all non-ASCII characters are escaped.
If *allow_nan* is false, then it will be a ``ValueError`` to
serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``)
@ -202,9 +196,7 @@ def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
If *indent* is a string, then JSON array elements and object members
will be pretty-printed with a newline followed by that string repeated
for each level of nesting. ``None`` (the default) selects the most compact
representation without any newlines. For backwards compatibility with
versions of simplejson earlier than 2.1.0, an integer is also accepted
and is converted to a string with that many spaces.
representation without any newlines.
If specified, *separators* should be an
``(item_separator, key_separator)`` tuple. The default is ``(', ', ': ')``
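For orientation, a quick sketch of how the dump()/dumps() options described in this docstring behave at the call site (illustrative only, not part of the diff)::

    import simplejson as json

    data = {'name': u'caf\xe9', (1, 2): 'tuple key'}

    # skipkeys=True drops the non-basic (tuple) key instead of raising TypeError;
    # ensure_ascii=False leaves the non-ASCII e-acute unescaped in the output.
    print(json.dumps(data, skipkeys=True, ensure_ascii=False))

    # A string indent pretty-prints one level of nesting per repetition;
    # separators overrides the documented default (', ', ': ') pair.
    print(json.dumps({'a': 1, 'b': [2, 3]}, indent='  ', separators=(',', ': ')))

    # indent=None (the default) gives the most compact single-line form.
    print(json.dumps({'a': 1, 'b': [2, 3]}, separators=(',', ':')))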
@ -308,13 +300,13 @@ def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
iterable_as_array=False, **kw):
"""Serialize ``obj`` to a JSON formatted ``str``.
If ``skipkeys`` is false then ``dict`` keys that are not basic types
(``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
If ``skipkeys`` is true then ``dict`` keys that are not basic types
(``str``, ``int``, ``long``, ``float``, ``bool``, ``None``)
will be skipped instead of raising a ``TypeError``.
If ``ensure_ascii`` is false, then the return value will be a
``unicode`` instance subject to normal Python ``str`` to ``unicode``
coercion rules instead of being escaped to an ASCII ``str``.
If *ensure_ascii* is false (default: ``True``), then the output may
contain non-ASCII characters, so long as they do not need to be escaped
by JSON. When it is true, all non-ASCII characters are escaped.
If ``check_circular`` is false, then the circular reference check
for container types will be skipped and a circular reference will
@ -338,7 +330,8 @@ def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
compact JSON representation, you should specify ``(',', ':')`` to eliminate
whitespace.
``encoding`` is the character encoding for str instances, default is UTF-8.
``encoding`` is the character encoding for bytes instances, default is
UTF-8.
``default(obj)`` is a function that should return a serializable version
of obj or raise TypeError. The default simply raises TypeError.
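A minimal sketch of a ``default`` hook; the helper name encode_extra is made up for illustration::

    import datetime
    import simplejson as json

    def encode_extra(obj):
        # Return a serializable stand-in, or raise TypeError for anything unknown.
        if isinstance(obj, datetime.datetime):
            return obj.isoformat()
        if isinstance(obj, set):
            return sorted(obj)
        raise TypeError('Object of type %s is not JSON serializable'
                        % type(obj).__name__)

    doc = {'when': datetime.datetime(2023, 1, 12, 21, 9), 'tags': {'b', 'a'}}
    print(json.dumps(doc, default=encode_extra))
    # e.g. {"when": "2023-01-12T21:09:00", "tags": ["a", "b"]}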
@ -367,7 +360,7 @@ def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
If specified, *item_sort_key* is a callable used to sort the items in
each dictionary. This is useful if you want to sort items other than
in alphabetical order by key. This option takes precendence over
in alphabetical order by key. This option takes precedence over
*sort_keys*.
If *sort_keys* is true (default: ``False``), the output of dictionaries
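A short illustration of ``item_sort_key`` taking precedence over ``sort_keys`` (illustrative only)::

    import simplejson as json

    prefs = {'zeta': 1, 'alpha': 3, 'mid': 2}

    # sort_keys orders object members alphabetically by key.
    print(json.dumps(prefs, sort_keys=True))
    # {"alpha": 3, "mid": 2, "zeta": 1}

    # item_sort_key receives (key, value) pairs and wins over sort_keys;
    # here members are ordered by value instead.
    print(json.dumps(prefs, item_sort_key=lambda kv: kv[1], sort_keys=True))
    # {"zeta": 1, "mid": 2, "alpha": 3}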
@ -428,14 +421,11 @@ def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None,
use_decimal=False, namedtuple_as_object=True, tuple_as_array=True,
**kw):
"""Deserialize ``fp`` (a ``.read()``-supporting file-like object containing
a JSON document) to a Python object.
a JSON document as `str` or `bytes`) to a Python object.
*encoding* determines the encoding used to interpret any
:class:`str` objects decoded by this instance (``'utf-8'`` by
default). It has no effect when decoding :class:`unicode` objects.
Note that currently only encodings that are a superset of ASCII work,
strings of other encodings should be passed in as :class:`unicode`.
`bytes` objects decoded by this instance (``'utf-8'`` by
default). It has no effect when decoding `str` objects.
*object_hook*, if specified, will be called with the result of every
JSON object decoded and its return value will be used in place of the
@ -488,11 +478,8 @@ def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
document) to a Python object.
*encoding* determines the encoding used to interpret any
:class:`str` objects decoded by this instance (``'utf-8'`` by
default). It has no effect when decoding :class:`unicode` objects.
Note that currently only encodings that are a superset of ASCII work,
strings of other encodings should be passed in as :class:`unicode`.
:class:`bytes` objects decoded by this instance (``'utf-8'`` by
default). It has no effect when decoding :class:`unicode` objects.
*object_hook*, if specified, will be called with the result of every
JSON object decoded and its return value will be used in place of the
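The reworded load()/loads() docstrings reflect that both entry points accept ``bytes`` input, decoded as UTF-8 by default; a quick check (illustrative, not part of the diff)::

    import simplejson as json
    from io import BytesIO

    payload = u'{"name": "caf\xe9"}'.encode('utf-8')

    # loads() accepts bytes directly; 'utf-8' is the default encoding.
    print(json.loads(payload)['name'])

    # load() reads str or bytes from a .read()-supporting object the same way.
    print(json.load(BytesIO(payload))['name'])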

View file

@ -25,6 +25,15 @@
#define JSON_InternFromString PyString_InternFromString
#endif /* PY_MAJOR_VERSION < 3 */
#if PY_VERSION_HEX < 0x03090000
#if !defined(PyObject_CallNoArgs)
#define PyObject_CallNoArgs(callable) PyObject_CallFunctionObjArgs(callable, NULL);
#endif
#if !defined(PyObject_CallOneArg)
#define PyObject_CallOneArg(callable, arg) PyObject_CallFunctionObjArgs(callable, arg, NULL);
#endif
#endif /* PY_VERSION_HEX < 0x03090000 */
#if PY_VERSION_HEX < 0x02070000
#if !defined(PyOS_string_to_double)
#define PyOS_string_to_double json_PyOS_string_to_double
@ -108,6 +117,9 @@ JSON_Accu_Destroy(JSON_Accu *acc);
#define ERR_STRING_CONTROL "Invalid control character %r at"
#define ERR_STRING_ESC1 "Invalid \\X escape sequence %r"
#define ERR_STRING_ESC4 "Invalid \\uXXXX escape sequence"
#define FOR_JSON_METHOD_NAME "for_json"
#define ASDICT_METHOD_NAME "_asdict"
typedef struct _PyScannerObject {
PyObject_HEAD
@ -243,12 +255,10 @@ static int
_convertPyInt_AsSsize_t(PyObject *o, Py_ssize_t *size_ptr);
static PyObject *
_convertPyInt_FromSsize_t(Py_ssize_t *size_ptr);
static int
_call_json_method(PyObject *obj, const char *method_name, PyObject **result);
static PyObject *
encoder_encode_float(PyEncoderObject *s, PyObject *obj);
static int
_is_namedtuple(PyObject *obj);
static int
_has_for_json_hook(PyObject *obj);
static PyObject *
moduleinit(void);
@ -383,30 +393,26 @@ maybe_quote_bigint(PyEncoderObject* s, PyObject *encoded, PyObject *obj)
}
static int
_is_namedtuple(PyObject *obj)
_call_json_method(PyObject *obj, const char *method_name, PyObject **result)
{
int rval = 0;
PyObject *_asdict = PyObject_GetAttrString(obj, "_asdict");
if (_asdict == NULL) {
PyObject *method = PyObject_GetAttrString(obj, method_name);
if (method == NULL) {
PyErr_Clear();
return 0;
}
rval = PyCallable_Check(_asdict);
Py_DECREF(_asdict);
return rval;
}
static int
_has_for_json_hook(PyObject *obj)
{
int rval = 0;
PyObject *for_json = PyObject_GetAttrString(obj, "for_json");
if (for_json == NULL) {
PyErr_Clear();
return 0;
if (PyCallable_Check(method)) {
PyObject *tmp = PyObject_CallNoArgs(method);
if (tmp == NULL && PyErr_ExceptionMatches(PyExc_TypeError)) {
PyErr_Clear();
} else {
// This will set result to NULL if a TypeError occurred,
// which must be checked by the caller
*result = tmp;
rval = 1;
}
}
rval = PyCallable_Check(for_json);
Py_DECREF(for_json);
Py_DECREF(method);
return rval;
}
@ -638,7 +644,7 @@ encoder_stringify_key(PyEncoderObject *s, PyObject *key)
if (!(PyInt_CheckExact(key) || PyLong_CheckExact(key))) {
/* See #118, do not trust custom str/repr */
PyObject *res;
PyObject *tmp = PyObject_CallFunctionObjArgs((PyObject *)&PyLong_Type, key, NULL);
PyObject *tmp = PyObject_CallOneArg((PyObject *)&PyLong_Type, key);
if (tmp == NULL) {
return NULL;
}
@ -792,7 +798,7 @@ join_list_string(PyObject *lst)
if (joinfn == NULL)
return NULL;
}
return PyObject_CallFunctionObjArgs(joinfn, lst, NULL);
return PyObject_CallOneArg(joinfn, lst);
}
#endif /* PY_MAJOR_VERSION < 3 */
@ -1484,7 +1490,7 @@ _parse_object_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_
/* if pairs_hook is not None: rval = object_pairs_hook(pairs) */
if (s->pairs_hook != Py_None) {
val = PyObject_CallFunctionObjArgs(s->pairs_hook, pairs, NULL);
val = PyObject_CallOneArg(s->pairs_hook, pairs);
if (val == NULL)
goto bail;
Py_DECREF(pairs);
@ -1494,7 +1500,7 @@ _parse_object_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ssize_
/* if object_hook is not None: rval = object_hook(rval) */
if (s->object_hook != Py_None) {
val = PyObject_CallFunctionObjArgs(s->object_hook, rval, NULL);
val = PyObject_CallOneArg(s->object_hook, rval);
if (val == NULL)
goto bail;
Py_DECREF(rval);
@ -1648,7 +1654,7 @@ _parse_object_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ss
/* if pairs_hook is not None: rval = object_pairs_hook(pairs) */
if (s->pairs_hook != Py_None) {
val = PyObject_CallFunctionObjArgs(s->pairs_hook, pairs, NULL);
val = PyObject_CallOneArg(s->pairs_hook, pairs);
if (val == NULL)
goto bail;
Py_DECREF(pairs);
@ -1658,7 +1664,7 @@ _parse_object_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t idx, Py_ss
/* if object_hook is not None: rval = object_hook(rval) */
if (s->object_hook != Py_None) {
val = PyObject_CallFunctionObjArgs(s->object_hook, rval, NULL);
val = PyObject_CallOneArg(s->object_hook, rval);
if (val == NULL)
goto bail;
Py_DECREF(rval);
@ -1851,7 +1857,7 @@ _parse_constant(PyScannerObject *s, PyObject *constant, Py_ssize_t idx, Py_ssize
PyObject *rval;
/* rval = parse_constant(constant) */
rval = PyObject_CallFunctionObjArgs(s->parse_constant, constant, NULL);
rval = PyObject_CallOneArg(s->parse_constant, constant);
idx += PyString_GET_SIZE(constant);
*next_idx_ptr = idx;
return rval;
@ -1937,7 +1943,7 @@ _match_number_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t start, Py_ssiz
if (is_float) {
/* parse as a float using a fast path if available, otherwise call user defined method */
if (s->parse_float != (PyObject *)&PyFloat_Type) {
rval = PyObject_CallFunctionObjArgs(s->parse_float, numstr, NULL);
rval = PyObject_CallOneArg(s->parse_float, numstr);
}
else {
/* rval = PyFloat_FromDouble(PyOS_ascii_atof(PyString_AS_STRING(numstr))); */
@ -1951,7 +1957,7 @@ _match_number_str(PyScannerObject *s, PyObject *pystr, Py_ssize_t start, Py_ssiz
else {
/* parse as an int using a fast path if available, otherwise call user defined method */
if (s->parse_int != (PyObject *)&PyInt_Type) {
rval = PyObject_CallFunctionObjArgs(s->parse_int, numstr, NULL);
rval = PyObject_CallOneArg(s->parse_int, numstr);
}
else {
rval = PyInt_FromString(PyString_AS_STRING(numstr), NULL, 10);
@ -2055,7 +2061,7 @@ _match_number_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t start, Py_
if (is_float) {
/* parse as a float using a fast path if available, otherwise call user defined method */
if (s->parse_float != (PyObject *)&PyFloat_Type) {
rval = PyObject_CallFunctionObjArgs(s->parse_float, numstr, NULL);
rval = PyObject_CallOneArg(s->parse_float, numstr);
}
else {
#if PY_MAJOR_VERSION >= 3
@ -2067,7 +2073,7 @@ _match_number_unicode(PyScannerObject *s, PyObject *pystr, Py_ssize_t start, Py_
}
else {
/* no fast path for unicode -> int, just call */
rval = PyObject_CallFunctionObjArgs(s->parse_int, numstr, NULL);
rval = PyObject_CallOneArg(s->parse_int, numstr);
}
Py_DECREF(numstr);
*next_idx_ptr = idx;
@ -2746,7 +2752,7 @@ encoder_encode_float(PyEncoderObject *s, PyObject *obj)
else {
/* See #118, do not trust custom str/repr */
PyObject *res;
PyObject *tmp = PyObject_CallFunctionObjArgs((PyObject *)&PyFloat_Type, obj, NULL);
PyObject *tmp = PyObject_CallOneArg((PyObject *)&PyFloat_Type, obj);
if (tmp == NULL) {
return NULL;
}
@ -2765,7 +2771,7 @@ encoder_encode_string(PyEncoderObject *s, PyObject *obj)
if (s->fast_encode) {
return py_encode_basestring_ascii(NULL, obj);
}
encoded = PyObject_CallFunctionObjArgs(s->encoder, obj, NULL);
encoded = PyObject_CallOneArg(s->encoder, obj);
if (encoded != NULL &&
#if PY_MAJOR_VERSION < 3
!PyString_Check(encoded) &&
@ -2796,6 +2802,7 @@ encoder_listencode_obj(PyEncoderObject *s, JSON_Accu *rval, PyObject *obj, Py_ss
/* Encode Python object obj to a JSON term, rval is a PyList */
int rv = -1;
do {
PyObject *newobj;
if (obj == Py_None || obj == Py_True || obj == Py_False) {
PyObject *cstr = _encoded_const(obj);
if (cstr != NULL)
@ -2815,7 +2822,7 @@ encoder_listencode_obj(PyEncoderObject *s, JSON_Accu *rval, PyObject *obj, Py_ss
}
else {
/* See #118, do not trust custom str/repr */
PyObject *tmp = PyObject_CallFunctionObjArgs((PyObject *)&PyLong_Type, obj, NULL);
PyObject *tmp = PyObject_CallOneArg((PyObject *)&PyLong_Type, obj);
if (tmp == NULL) {
encoded = NULL;
}
@ -2836,26 +2843,37 @@ encoder_listencode_obj(PyEncoderObject *s, JSON_Accu *rval, PyObject *obj, Py_ss
if (encoded != NULL)
rv = _steal_accumulate(rval, encoded);
}
else if (s->for_json && _has_for_json_hook(obj)) {
PyObject *newobj;
if (Py_EnterRecursiveCall(" while encoding a JSON object"))
return rv;
newobj = PyObject_CallMethod(obj, "for_json", NULL);
if (newobj != NULL) {
rv = encoder_listencode_obj(s, rval, newobj, indent_level);
Py_DECREF(newobj);
else if (s->for_json && _call_json_method(obj, FOR_JSON_METHOD_NAME, &newobj)) {
if (newobj == NULL) {
return -1;
}
if (Py_EnterRecursiveCall(" while encoding a JSON object")) {
Py_DECREF(newobj);
return rv;
}
rv = encoder_listencode_obj(s, rval, newobj, indent_level);
Py_DECREF(newobj);
Py_LeaveRecursiveCall();
}
else if (s->namedtuple_as_object && _is_namedtuple(obj)) {
PyObject *newobj;
if (Py_EnterRecursiveCall(" while encoding a JSON object"))
return rv;
newobj = PyObject_CallMethod(obj, "_asdict", NULL);
if (newobj != NULL) {
rv = encoder_listencode_dict(s, rval, newobj, indent_level);
Py_DECREF(newobj);
else if (s->namedtuple_as_object && _call_json_method(obj, ASDICT_METHOD_NAME, &newobj)) {
if (newobj == NULL) {
return -1;
}
if (Py_EnterRecursiveCall(" while encoding a JSON object")) {
Py_DECREF(newobj);
return rv;
}
if (PyDict_Check(newobj)) {
rv = encoder_listencode_dict(s, rval, newobj, indent_level);
} else {
PyErr_Format(
PyExc_TypeError,
"_asdict() must return a dict, not %.80s",
Py_TYPE(newobj)->tp_name
);
rv = -1;
}
Py_DECREF(newobj);
Py_LeaveRecursiveCall();
}
else if (PyList_Check(obj) || (s->tuple_as_array && PyTuple_Check(obj))) {
@ -2913,7 +2931,7 @@ encoder_listencode_obj(PyEncoderObject *s, JSON_Accu *rval, PyObject *obj, Py_ss
}
if (Py_EnterRecursiveCall(" while encoding a JSON object"))
return rv;
newobj = PyObject_CallFunctionObjArgs(s->defaultfn, obj, NULL);
newobj = PyObject_CallOneArg(s->defaultfn, obj);
if (newobj == NULL) {
Py_XDECREF(ident);
Py_LeaveRecursiveCall();
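User-visible effect of the C encoder changes above: both hooks now go through _call_json_method, and an _asdict() result that is not a dict raises a TypeError with an explicit message. A Python-level illustration (BadAsdict is a made-up class)::

    import simplejson as json
    from collections import namedtuple

    Point = namedtuple('Point', 'x y')
    print(json.dumps(Point(1, 2)))   # namedtuples encode as objects: {"x": 1, "y": 2}

    class BadAsdict(tuple):
        # Hypothetical type whose _asdict() does not return a dict.
        def _asdict(self):
            return [('x', 1)]

    try:
        json.dumps(BadAsdict())
    except TypeError as e:
        print(e)                     # _asdict() must return a dict, not list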

View file

@ -109,6 +109,8 @@ def py_scanstring(s, end, encoding=None, strict=True,
uni = int(esc, 16)
except ValueError:
raise JSONDecodeError(msg, s, end - 1)
if uni < 0 or uni > _maxunicode:
raise JSONDecodeError(msg, s, end - 1)
end += 5
# Check for surrogate pair on UCS-4 systems
# Note that this will join high/low surrogate pairs
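The added bounds check rejects escape values outside the interpreter's Unicode range; for orientation, how \uXXXX escape handling looks through the public API (illustrative)::

    import simplejson as json

    # A high/low surrogate pair decodes to a single code point.
    print(json.loads(u'"\\ud83d\\ude00"') == u'\U0001f600')   # True

    # A malformed \uXXXX escape raises JSONDecodeError.
    try:
        json.loads(u'"\\u12G4"')
    except json.JSONDecodeError as e:
        print(e.msg)   # e.g. Invalid \uXXXX escape sequence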

View file

@ -32,6 +32,7 @@ ESCAPE_DCT = {
for i in range(0x20):
#ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i))
ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
del i
FLOAT_REPR = repr
@ -450,6 +451,15 @@ def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
not isinstance(_int_as_string_bitcount, integer_types))):
raise TypeError("int_as_string_bitcount must be a positive integer")
def call_method(obj, method_name):
method = getattr(obj, method_name, None)
if callable(method):
try:
return (method(),)
except TypeError:
pass
return None
def _encode_int(value):
skip_quoting = (
_int_as_string_bitcount is None
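The new call_method helper centralises the getattr/callable check for the for_json and _asdict hooks (a hook whose call raises TypeError is treated as absent). How the for_json hook looks from the caller's side; the Money class is made up for illustration::

    import simplejson as json

    class Money(object):
        def __init__(self, amount, currency):
            self.amount, self.currency = amount, currency

        def for_json(self):
            # Consulted by the encoder only when for_json=True is passed.
            return {'amount': str(self.amount), 'currency': self.currency}

    print(json.dumps({'total': Money(42, 'EUR')}, for_json=True, sort_keys=True))
    # {"total": {"amount": "42", "currency": "EUR"}}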
@ -512,15 +522,18 @@ def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
yield buf + str(value)
else:
yield buf
for_json = _for_json and getattr(value, 'for_json', None)
if for_json and callable(for_json):
chunks = _iterencode(for_json(), _current_indent_level)
for_json = _for_json and call_method(value, 'for_json')
if for_json:
chunks = _iterencode(for_json[0], _current_indent_level)
elif isinstance(value, list):
chunks = _iterencode_list(value, _current_indent_level)
else:
_asdict = _namedtuple_as_object and getattr(value, '_asdict', None)
if _asdict and callable(_asdict):
chunks = _iterencode_dict(_asdict(),
_asdict = _namedtuple_as_object and call_method(value, '_asdict')
if _asdict:
dct = _asdict[0]
if not isinstance(dct, dict):
raise TypeError("_asdict() must return a dict, not %s" % (type(dct).__name__,))
chunks = _iterencode_dict(dct,
_current_indent_level)
elif _tuple_as_array and isinstance(value, tuple):
chunks = _iterencode_list(value, _current_indent_level)
@ -633,15 +646,18 @@ def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
elif _use_decimal and isinstance(value, Decimal):
yield str(value)
else:
for_json = _for_json and getattr(value, 'for_json', None)
if for_json and callable(for_json):
chunks = _iterencode(for_json(), _current_indent_level)
for_json = _for_json and call_method(value, 'for_json')
if for_json:
chunks = _iterencode(for_json[0], _current_indent_level)
elif isinstance(value, list):
chunks = _iterencode_list(value, _current_indent_level)
else:
_asdict = _namedtuple_as_object and getattr(value, '_asdict', None)
if _asdict and callable(_asdict):
chunks = _iterencode_dict(_asdict(),
_asdict = _namedtuple_as_object and call_method(value, '_asdict')
if _asdict:
dct = _asdict[0]
if not isinstance(dct, dict):
raise TypeError("_asdict() must return a dict, not %s" % (type(dct).__name__,))
chunks = _iterencode_dict(dct,
_current_indent_level)
elif _tuple_as_array and isinstance(value, tuple):
chunks = _iterencode_list(value, _current_indent_level)
@ -676,18 +692,20 @@ def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
elif isinstance(o, float):
yield _floatstr(o)
else:
for_json = _for_json and getattr(o, 'for_json', None)
if for_json and callable(for_json):
for chunk in _iterencode(for_json(), _current_indent_level):
for_json = _for_json and call_method(o, 'for_json')
if for_json:
for chunk in _iterencode(for_json[0], _current_indent_level):
yield chunk
elif isinstance(o, list):
for chunk in _iterencode_list(o, _current_indent_level):
yield chunk
else:
_asdict = _namedtuple_as_object and getattr(o, '_asdict', None)
if _asdict and callable(_asdict):
for chunk in _iterencode_dict(_asdict(),
_current_indent_level):
_asdict = _namedtuple_as_object and call_method(o, '_asdict')
if _asdict:
dct = _asdict[0]
if not isinstance(dct, dict):
raise TypeError("_asdict() must return a dict, not %s" % (type(dct).__name__,))
for chunk in _iterencode_dict(dct, _current_indent_level):
yield chunk
elif (_tuple_as_array and isinstance(o, tuple)):
for chunk in _iterencode_list(o, _current_indent_level):